diff --git a/.gitignore b/.gitignore
index c9dae8e685c2..430247c396c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -58,3 +58,5 @@ gradlew.bat
 .project
 .classpath
 .settings
+packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/xcshareddata/swiftpm/Package.resolved
+packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
diff --git a/flutter_packages.code-workspace b/flutter_packages.code-workspace
new file mode 100644
index 000000000000..bbb59b0a5b2d
--- /dev/null
+++ b/flutter_packages.code-workspace
@@ -0,0 +1,27 @@
+{
+  "folders": [
+    {
+      "path": "."
+    },
+    {
+      "path": "./packages/camera"
+    },
+    {
+      "path": "./packages/camera/camera_avfoundation"
+    },
+    {
+      "path": "./packages/camera/camera_linux"
+    }
+  ],
+  "settings": {
+    "editor.quickSuggestions": {
+      "comments": "off",
+      "strings": "off",
+      "other": "off"
+    },
+    "files.associations": {
+      "MLTable": "yaml",
+      "cstring": "cpp"
+    }
+  }
+}
diff --git a/packages/camera/camera/example/.gitattributes b/packages/camera/camera/example/.gitattributes
new file mode 100644
index 000000000000..f087b429e2f8
--- /dev/null
+++ b/packages/camera/camera/example/.gitattributes
@@ -0,0 +1 @@
+*.tar.gz filter=lfs diff=lfs merge=lfs -text
diff --git a/packages/camera/camera/example/.metadata b/packages/camera/camera/example/.metadata
new file mode 100644
index 000000000000..9c11960fa8d4
--- /dev/null
+++ b/packages/camera/camera/example/.metadata
@@ -0,0 +1,30 @@
+# This file tracks properties of this Flutter project.
+# Used by Flutter tool to assess capabilities and perform upgrades etc.
+#
+# This file should be version controlled and should not be manually edited.
+ +version: + revision: "603104015dd692ea3403755b55d07813d5cf8965" + channel: "[user-branch]" + +project_type: app + +# Tracks metadata for the flutter migrate command +migration: + platforms: + - platform: root + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + - platform: linux + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + + # User provided section + + # List of Local paths (relative to this file) that should be + # ignored by the migrate tool. + # + # Files that are not part of the templates will be ignored by default. + unmanaged_files: + - 'lib/main.dart' + - 'ios/Runner.xcodeproj/project.pbxproj' diff --git a/packages/camera/camera/example/README.md b/packages/camera/camera/example/README.md new file mode 100644 index 000000000000..2b3fce4c86a5 --- /dev/null +++ b/packages/camera/camera/example/README.md @@ -0,0 +1,16 @@ +# example + +A new Flutter project. + +## Getting Started + +This project is a starting point for a Flutter application. + +A few resources to get you started if this is your first Flutter project: + +- [Lab: Write your first Flutter app](https://docs.flutter.dev/get-started/codelab) +- [Cookbook: Useful Flutter samples](https://docs.flutter.dev/cookbook) + +For help getting started with Flutter development, view the +[online documentation](https://docs.flutter.dev/), which offers tutorials, +samples, guidance on mobile development, and a full API reference. diff --git a/packages/camera/camera/example/analysis_options.yaml b/packages/camera/camera/example/analysis_options.yaml new file mode 100644 index 000000000000..0d2902135cae --- /dev/null +++ b/packages/camera/camera/example/analysis_options.yaml @@ -0,0 +1,28 @@ +# This file configures the analyzer, which statically analyzes Dart code to +# check for errors, warnings, and lints. 
+# +# The issues identified by the analyzer are surfaced in the UI of Dart-enabled +# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be +# invoked from the command line by running `flutter analyze`. + +# The following line activates a set of recommended lints for Flutter apps, +# packages, and plugins designed to encourage good coding practices. +include: package:flutter_lints/flutter.yaml + +linter: + # The lint rules applied to this project can be customized in the + # section below to disable rules from the `package:flutter_lints/flutter.yaml` + # included above or to enable additional rules. A list of all available lints + # and their documentation is published at https://dart.dev/lints. + # + # Instead of disabling a lint rule for the entire project in the + # section below, it can also be suppressed for a single line of code + # or a specific dart file by using the `// ignore: name_of_lint` and + # `// ignore_for_file: name_of_lint` syntax on the line or in the file + # producing the lint. 
+ rules: + # avoid_print: false # Uncomment to disable the `avoid_print` rule + # prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule + +# Additional information about this file can be found at +# https://dart.dev/guides/language/analysis-options diff --git a/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist index 7c5696400627..1dc6cf7652ba 100644 --- a/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist +++ b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 12.0 + 13.0 diff --git a/packages/camera/camera/example/ios/Podfile b/packages/camera/camera/example/ios/Podfile index 01d4aa611bb9..17adeb14132e 100644 --- a/packages/camera/camera/example/ios/Podfile +++ b/packages/camera/camera/example/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '12.0' +# platform :ios, '13.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. 
ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj index 92de69f31b7a..e9a714f09ec6 100644 --- a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj @@ -141,6 +141,7 @@ 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 786AFE0EBD00D4651BA88F35 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -222,6 +223,23 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; + 786AFE0EBD00D4651BA88F35 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; @@ -336,7 +354,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -415,7 +433,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + 
IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -466,7 +484,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; diff --git a/packages/camera/camera/example/lib/main.dart b/packages/camera/camera/example/lib/main.dart index cca528c04fda..75cc60897b8f 100644 --- a/packages/camera/camera/example/lib/main.dart +++ b/packages/camera/camera/example/lib/main.dart @@ -6,6 +6,9 @@ import 'dart:async'; import 'dart:io'; import 'package:camera/camera.dart'; +import 'package:camera_avfoundation/camera_avfoundation.dart'; +import 'package:camera_linux/camera_linux.dart'; +import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter/scheduler.dart'; @@ -64,6 +67,13 @@ class _CameraExampleHomeState extends State double _maxAvailableZoom = 1.0; double _currentScale = 1.0; double _baseScale = 1.0; + double _currentLensPosition = 0.0; + + // Transform state (iOS 17+) + double _transformRotation = 0; + bool _transformFlipH = false; + bool _transformFlipV = false; + bool _transformCropEnabled = false; // Counting pointers (number of user fingers on screen) int _pointers = 0; @@ -155,6 +165,44 @@ class _CameraExampleHomeState extends State ), _captureControlRowWidget(), _modeControlRowWidget(), + if (!kIsWeb && Platform.isIOS) _lensPositionWidget(), + if (!kIsWeb && Platform.isIOS) _transformControlRowWidget(), + Row( + children: [ + ElevatedButton( + onPressed: () { + setState(() { + if (TargetPlatform.linux != defaultTargetPlatform) { + return; + } + final CameraLinux nativeCamera = + CameraPlatform.instance as CameraLinux; + nativeCamera.setImageFormatGroup( + controller!.cameraId, + 
PlatformImageFormatGroup.mono8, + ); + }); + }, + child: Text('mono8'), + ), + ElevatedButton( + onPressed: () { + setState(() { + if (TargetPlatform.linux != defaultTargetPlatform) { + return; + } + final CameraLinux nativeCamera = + CameraPlatform.instance as CameraLinux; + nativeCamera.setImageFormatGroup( + controller!.cameraId, + PlatformImageFormatGroup.rgb8, + ); + }); + }, + child: Text('rgb8'), + ), + ], + ), Padding( padding: const EdgeInsets.all(5.0), child: Row( @@ -489,6 +537,168 @@ class _CameraExampleHomeState extends State ); } + /// Lens position slider, shown on iOS only. + Widget _lensPositionWidget() { + return ColoredBox( + color: Colors.grey.shade50, + child: Column( + children: [ + const Center(child: Text('Lens Position (lock focus first)')), + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + const Text('0.0'), + Expanded( + child: Slider( + value: _currentLensPosition, + min: 0.0, + max: 1.0, + divisions: 100, + label: _currentLensPosition.toStringAsFixed(2), + onChanged: + controller != null && + controller!.value.focusMode == FocusMode.locked + ? (double value) { + setState(() => _currentLensPosition = value); + (CameraPlatform.instance as AVFoundationCamera) + .setLensPosition(value); + } + : null, + ), + ), + const Text('1.0'), + ], + ), + ], + ), + ); + } + + Future _applyTransform() async { + if (controller == null || !controller!.value.isInitialized) { + return; + } + await (CameraPlatform.instance as AVFoundationCamera).setTransform( + controller!.cameraId, + CameraTransform( + rotationDegrees: _transformRotation, + flipHorizontally: _transformFlipH, + flipVertically: _transformFlipV, + cropRect: _transformCropEnabled + ? const CameraTransformRect( + x: 0.1, + y: 0.1, + width: 0.8, + height: 0.8, + ) + : null, + ), + ); + } + + /// Transform controls panel (iOS 17+ only). 
+ Widget _transformControlRowWidget() { + final bool enabled = + controller != null && controller!.value.isInitialized; + return ColoredBox( + color: Colors.grey.shade50, + child: Column( + children: [ + const Center( + child: Text( + 'Camera Transform (iOS 17+)', + style: TextStyle(fontWeight: FontWeight.bold), + ), + ), + // Rotation + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + const Text('Rotation:'), + for (final double deg in [0, 90, 180, 270]) + TextButton( + style: TextButton.styleFrom( + foregroundColor: _transformRotation == deg + ? Colors.orange + : Colors.blue, + ), + onPressed: enabled + ? () { + setState(() => _transformRotation = deg); + _applyTransform(); + } + : null, + child: Text('${deg.toInt()}\u00b0'), + ), + ], + ), + // Flip, crop, reset + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + TextButton( + style: TextButton.styleFrom( + foregroundColor: + _transformFlipH ? Colors.orange : Colors.blue, + ), + onPressed: enabled + ? () { + setState(() => _transformFlipH = !_transformFlipH); + _applyTransform(); + } + : null, + child: const Text('Flip H'), + ), + TextButton( + style: TextButton.styleFrom( + foregroundColor: + _transformFlipV ? Colors.orange : Colors.blue, + ), + onPressed: enabled + ? () { + setState(() => _transformFlipV = !_transformFlipV); + _applyTransform(); + } + : null, + child: const Text('Flip V'), + ), + TextButton( + style: TextButton.styleFrom( + foregroundColor: + _transformCropEnabled ? Colors.orange : Colors.blue, + ), + onPressed: enabled + ? () { + setState( + () => + _transformCropEnabled = !_transformCropEnabled, + ); + _applyTransform(); + } + : null, + child: const Text('Crop 80%'), + ), + TextButton( + onPressed: enabled + ? 
() { + setState(() { + _transformRotation = 0; + _transformFlipH = false; + _transformFlipV = false; + _transformCropEnabled = false; + }); + _applyTransform(); + } + : null, + child: const Text('Reset'), + ), + ], + ), + ], + ), + ); + } + /// Display the control bar with buttons to take pictures and record videos. Widget _captureControlRowWidget() { final CameraController? cameraController = controller; @@ -635,7 +845,7 @@ class _CameraExampleHomeState extends State ) async { final cameraController = CameraController( cameraDescription, - kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium, + ResolutionPreset.max, enableAudio: enableAudio, imageFormatGroup: ImageFormatGroup.jpeg, ); diff --git a/packages/camera/camera/example/linux/.gitignore b/packages/camera/camera/example/linux/.gitignore new file mode 100644 index 000000000000..d3896c98444f --- /dev/null +++ b/packages/camera/camera/example/linux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral diff --git a/packages/camera/camera/example/linux/CMakeLists.txt b/packages/camera/camera/example/linux/CMakeLists.txt new file mode 100644 index 000000000000..c6d1dcf8e0e3 --- /dev/null +++ b/packages/camera/camera/example/linux/CMakeLists.txt @@ -0,0 +1,145 @@ +# Project-level configuration. +cmake_minimum_required(VERSION 3.10) +project(runner LANGUAGES CXX) + +# The name of the executable created for the application. Change this to change +# the on-disk name of your application. +set(BINARY_NAME "example") +# The unique GTK application identifier for this application. See: +# https://wiki.gnome.org/HowDoI/ChooseApplicationID +set(APPLICATION_ID "io.flutter.plugins.example") + +# Explicitly opt in to modern CMake behaviors to avoid warnings with recent +# versions of CMake. +cmake_policy(SET CMP0063 NEW) + +# Load bundled libraries from the lib/ directory relative to the binary. +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Root filesystem for cross-building. 
+if(FLUTTER_TARGET_PLATFORM_SYSROOT)
+  set(CMAKE_SYSROOT ${FLUTTER_TARGET_PLATFORM_SYSROOT})
+  set(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT})
+  set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+  set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
+  set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+  set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+endif()
+
+# Define build configuration options.
+if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+  set(CMAKE_BUILD_TYPE "Debug" CACHE
+    STRING "Flutter build mode" FORCE)
+  set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS
+    "Debug" "Profile" "Release")
+endif()
+
+# Compilation settings that should be applied to most targets.
+#
+# Be cautious about adding new options here, as plugins use this function by
+# default. In most cases, you should add new options to specific targets instead
+# of modifying this function.
+function(APPLY_STANDARD_SETTINGS TARGET)
+  target_compile_features(${TARGET} PUBLIC cxx_std_14)
+  target_compile_options(${TARGET} PRIVATE -Wall -Werror)
+  target_compile_options(${TARGET} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:-O3>")
+  target_compile_definitions(${TARGET} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:NDEBUG>")
+endfunction()
+
+# Flutter library and tool build rules.
+set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter")
+add_subdirectory(${FLUTTER_MANAGED_DIR})
+
+# System-level dependencies.
+find_package(PkgConfig REQUIRED)
+pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0)
+
+add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}")
+
+# Define the application target. To change its name, change BINARY_NAME above,
+# not the value here, or `flutter run` will no longer work.
+#
+# Any new source files that you add to the application should be added here.
+add_executable(${BINARY_NAME}
+  "main.cc"
+  "my_application.cc"
+  "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc"
+)
+
+# Apply the standard set of build settings. This can be removed for applications
+# that need different build settings.
+apply_standard_settings(${BINARY_NAME}) + +# Add dependency libraries. Add any application-specific dependencies here. +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK) + +# Run the Flutter tool portions of the build. This must not be removed. +add_dependencies(${BINARY_NAME} flutter_assemble) + +# Only the install-generated bundle's copy of the executable will launch +# correctly, since the resources must in the right relative locations. To avoid +# people trying to run the unbundled copy, put it in a subdirectory instead of +# the default top-level location. +set_target_properties(${BINARY_NAME} + PROPERTIES + RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/intermediates_do_not_run" +) + + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. 
+install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +foreach(bundled_library ${PLUGIN_BUNDLED_LIBRARIES}) + install(FILES "${bundled_library}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endforeach(bundled_library) + +# Copy the native assets provided by the build.dart from all packages. +set(NATIVE_ASSETS_DIR "${PROJECT_BUILD_DIR}native_assets/linux/") +install(DIRECTORY "${NATIVE_ASSETS_DIR}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() diff --git a/packages/camera/camera/example/linux/flutter/CMakeLists.txt b/packages/camera/camera/example/linux/flutter/CMakeLists.txt new file mode 100644 index 000000000000..d5bd01648a96 --- /dev/null +++ b/packages/camera/camera/example/linux/flutter/CMakeLists.txt @@ -0,0 +1,88 @@ +# This file controls Flutter-level build steps. It should not be edited. 
+cmake_minimum_required(VERSION 3.10) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +# TODO: Move the rest of this into files in ephemeral. See +# https://github.com/flutter/flutter/issues/57146. + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. +function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) +pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0) +pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0) + +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_linux_gtk.so") + +# Published to parent scope for install step. 
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) +set(AOT_LIBRARY "${PROJECT_DIR}/build/lib/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "fl_basic_message_channel.h" + "fl_binary_codec.h" + "fl_binary_messenger.h" + "fl_dart_project.h" + "fl_engine.h" + "fl_json_message_codec.h" + "fl_json_method_codec.h" + "fl_message_codec.h" + "fl_method_call.h" + "fl_method_channel.h" + "fl_method_codec.h" + "fl_method_response.h" + "fl_plugin_registrar.h" + "fl_plugin_registry.h" + "fl_standard_message_codec.h" + "fl_standard_method_codec.h" + "fl_string_codec.h" + "fl_value.h" + "fl_view.h" + "flutter_linux.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/flutter_linux/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE + PkgConfig::GTK + PkgConfig::GLIB + PkgConfig::GIO +) +add_dependencies(flutter flutter_assemble) + +# === Flutter tool backend === +# _phony_ is a non-existent file to force this command to run every time, +# since currently there's no way to get a full input/output list from the +# flutter tool. 
+add_custom_command(
+  OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS}
+    ${CMAKE_CURRENT_BINARY_DIR}/_phony_
+  COMMAND ${CMAKE_COMMAND} -E env
+    ${FLUTTER_TOOL_ENVIRONMENT}
+    "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.sh"
+      ${FLUTTER_TARGET_PLATFORM} ${CMAKE_BUILD_TYPE}
+  VERBATIM
+)
+add_custom_target(flutter_assemble DEPENDS
+  "${FLUTTER_LIBRARY}"
+  ${FLUTTER_LIBRARY_HEADERS}
+)
diff --git a/packages/camera/camera/example/linux/flutter/generated_plugins.cmake b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake
new file mode 100644
index 000000000000..1aa0c3eb78bd
--- /dev/null
+++ b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake
@@ -0,0 +1,24 @@
+#
+# Generated file, do not edit.
+#
+
+list(APPEND FLUTTER_PLUGIN_LIST
+  camera_linux
+)
+
+list(APPEND FLUTTER_FFI_PLUGIN_LIST
+)
+
+set(PLUGIN_BUNDLED_LIBRARIES)
+
+foreach(plugin ${FLUTTER_PLUGIN_LIST})
+  add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/linux plugins/${plugin})
+  target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin)
+  list(APPEND PLUGIN_BUNDLED_LIBRARIES $<TARGET_FILE:${plugin}_plugin>)
+  list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries})
+endforeach(plugin)
+
+foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST})
+  add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/linux plugins/${ffi_plugin})
+  list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries})
+endforeach(ffi_plugin)
diff --git a/packages/camera/camera/example/linux/my_application.cc
b/packages/camera/camera/example/linux/my_application.cc
new file mode 100644
index 000000000000..c0530d422cdd
--- /dev/null
+++ b/packages/camera/camera/example/linux/my_application.cc
@@ -0,0 +1,124 @@
+#include "my_application.h"
+
+#include <gtk/gtk.h>
+#ifdef GDK_WINDOWING_X11
+#include <gdk/gdkx.h>
+#endif
+
+#include "flutter/generated_plugin_registrant.h"
+
+struct _MyApplication {
+  GtkApplication parent_instance;
+  char** dart_entrypoint_arguments;
+};
+
+G_DEFINE_TYPE(MyApplication, my_application, GTK_TYPE_APPLICATION)
+
+// Implements GApplication::activate.
+static void my_application_activate(GApplication* application) {
+  MyApplication* self = MY_APPLICATION(application);
+  GtkWindow* window =
+      GTK_WINDOW(gtk_application_window_new(GTK_APPLICATION(application)));
+
+  // Use a header bar when running in GNOME as this is the common style used
+  // by applications and is the setup most users will be using (e.g. Ubuntu
+  // desktop).
+  // If running on X and not using GNOME then just use a traditional title bar
+  // in case the window manager does more exotic layout, e.g. tiling.
+  // If running on Wayland assume the header bar will work (may need changing
+  // if future cases occur).
+ gboolean use_header_bar = TRUE; +#ifdef GDK_WINDOWING_X11 + GdkScreen* screen = gtk_window_get_screen(window); + if (GDK_IS_X11_SCREEN(screen)) { + const gchar* wm_name = gdk_x11_screen_get_window_manager_name(screen); + if (g_strcmp0(wm_name, "GNOME Shell") != 0) { + use_header_bar = FALSE; + } + } +#endif + if (use_header_bar) { + GtkHeaderBar* header_bar = GTK_HEADER_BAR(gtk_header_bar_new()); + gtk_widget_show(GTK_WIDGET(header_bar)); + gtk_header_bar_set_title(header_bar, "example"); + gtk_header_bar_set_show_close_button(header_bar, TRUE); + gtk_window_set_titlebar(window, GTK_WIDGET(header_bar)); + } else { + gtk_window_set_title(window, "example"); + } + + gtk_window_set_default_size(window, 1280, 720); + gtk_widget_show(GTK_WIDGET(window)); + + g_autoptr(FlDartProject) project = fl_dart_project_new(); + fl_dart_project_set_dart_entrypoint_arguments(project, self->dart_entrypoint_arguments); + + FlView* view = fl_view_new(project); + gtk_widget_show(GTK_WIDGET(view)); + gtk_container_add(GTK_CONTAINER(window), GTK_WIDGET(view)); + + fl_register_plugins(FL_PLUGIN_REGISTRY(view)); + + gtk_widget_grab_focus(GTK_WIDGET(view)); +} + +// Implements GApplication::local_command_line. +static gboolean my_application_local_command_line(GApplication* application, gchar*** arguments, int* exit_status) { + MyApplication* self = MY_APPLICATION(application); + // Strip out the first argument as it is the binary name. + self->dart_entrypoint_arguments = g_strdupv(*arguments + 1); + + g_autoptr(GError) error = nullptr; + if (!g_application_register(application, nullptr, &error)) { + g_warning("Failed to register: %s", error->message); + *exit_status = 1; + return TRUE; + } + + g_application_activate(application); + *exit_status = 0; + + return TRUE; +} + +// Implements GApplication::startup. +static void my_application_startup(GApplication* application) { + //MyApplication* self = MY_APPLICATION(object); + + // Perform any actions required at application startup. 
+
+  G_APPLICATION_CLASS(my_application_parent_class)->startup(application);
+}
+
+// Implements GApplication::shutdown.
+static void my_application_shutdown(GApplication* application) {
+  //MyApplication* self = MY_APPLICATION(object);
+
+  // Perform any actions required at application shutdown.
+
+  G_APPLICATION_CLASS(my_application_parent_class)->shutdown(application);
+}
+
+// Implements GObject::dispose.
+static void my_application_dispose(GObject* object) {
+  MyApplication* self = MY_APPLICATION(object);
+  g_clear_pointer(&self->dart_entrypoint_arguments, g_strfreev);
+  G_OBJECT_CLASS(my_application_parent_class)->dispose(object);
+}
+
+static void my_application_class_init(MyApplicationClass* klass) {
+  G_APPLICATION_CLASS(klass)->activate = my_application_activate;
+  G_APPLICATION_CLASS(klass)->local_command_line = my_application_local_command_line;
+  G_APPLICATION_CLASS(klass)->startup = my_application_startup;
+  G_APPLICATION_CLASS(klass)->shutdown = my_application_shutdown;
+  G_OBJECT_CLASS(klass)->dispose = my_application_dispose;
+}
+
+static void my_application_init(MyApplication* self) {}
+
+MyApplication* my_application_new() {
+  return MY_APPLICATION(g_object_new(my_application_get_type(),
+                                     "application-id", APPLICATION_ID,
+                                     "flags", G_APPLICATION_NON_UNIQUE,
+                                     nullptr));
+}
diff --git a/packages/camera/camera/example/linux/my_application.h b/packages/camera/camera/example/linux/my_application.h
new file mode 100644
index 000000000000..72271d5e4170
--- /dev/null
+++ b/packages/camera/camera/example/linux/my_application.h
@@ -0,0 +1,18 @@
+#ifndef FLUTTER_MY_APPLICATION_H_
+#define FLUTTER_MY_APPLICATION_H_
+
+#include <gtk/gtk.h>
+
+G_DECLARE_FINAL_TYPE(MyApplication, my_application, MY, APPLICATION,
+                     GtkApplication)
+
+/**
+ * my_application_new:
+ *
+ * Creates a new Flutter-based application.
+ *
+ * Returns: a new #MyApplication.
+ */ +MyApplication* my_application_new(); + +#endif // FLUTTER_MY_APPLICATION_H_ diff --git a/packages/camera/camera/example/pubspec.yaml b/packages/camera/camera/example/pubspec.yaml index b5d9dff6e913..af6c4b52fa02 100644 --- a/packages/camera/camera/example/pubspec.yaml +++ b/packages/camera/camera/example/pubspec.yaml @@ -29,5 +29,13 @@ dev_dependencies: sdk: flutter leak_tracker_flutter_testing: any +dependency_overrides: + camera_avfoundation: + path: ../../camera_avfoundation + camera_web: + path: ../../camera_web + camera_linux: + path: ../../camera_linux + flutter: uses-material-design: true diff --git a/packages/camera/camera/example/test/widget_test.dart b/packages/camera/camera/example/test/widget_test.dart new file mode 100644 index 000000000000..092d222f7e16 --- /dev/null +++ b/packages/camera/camera/example/test/widget_test.dart @@ -0,0 +1,30 @@ +// This is a basic Flutter widget test. +// +// To perform an interaction with a widget in your test, use the WidgetTester +// utility in the flutter_test package. For example, you can send tap and scroll +// gestures. You can also use WidgetTester to find child widgets in the widget +// tree, read text, and verify that the values of widget properties are correct. + +import 'package:flutter/material.dart'; +import 'package:flutter_test/flutter_test.dart'; + +import 'package:example/main.dart'; + +void main() { + testWidgets('Counter increments smoke test', (WidgetTester tester) async { + // Build our app and trigger a frame. + await tester.pumpWidget(const MyApp()); + + // Verify that our counter starts at 0. + expect(find.text('0'), findsOneWidget); + expect(find.text('1'), findsNothing); + + // Tap the '+' icon and trigger a frame. + await tester.tap(find.byIcon(Icons.add)); + await tester.pump(); + + // Verify that our counter has incremented. 
+ expect(find.text('0'), findsNothing); + expect(find.text('1'), findsOneWidget); + }); +} diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 1c9e8bb0b145..26673869459a 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -19,11 +19,18 @@ flutter: default_package: camera_avfoundation web: default_package: camera_web + linux: + default_package: camera_linux dependencies: camera_android_camerax: ^0.7.0 camera_avfoundation: ^0.10.0 camera_platform_interface: ^2.12.0 + camera_linux: + git: + url: git@github.com:LightX-Innovations/flutter_packages.git + path: packages/camera/camera_linux + ref: camera_0.6 camera_web: ^0.3.3 flutter: sdk: flutter diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index ca97b101df8b..9a532157ea2e 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.10.2 + +* Adds setLensPosition support for manually controlling lens focus position on iOS. + ## 0.10.1 * Fixes fatal crash on iPhone 17 when using `ResolutionPreset.max`. diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart index f9576b96398f..cda6e362fc30 100644 --- a/packages/camera/camera_avfoundation/example/lib/main.dart +++ b/packages/camera/camera_avfoundation/example/lib/main.dart @@ -6,6 +6,7 @@ import 'dart:async'; import 'dart:io'; import 'dart:math'; +import 'package:camera_avfoundation/camera_avfoundation.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; @@ -47,8 +48,7 @@ void _logError(String code, String? message) { print('Error: $code${message == null ? 
'' : '\nError Message: $message'}'); } -class _CameraExampleHomeState extends State - with WidgetsBindingObserver, TickerProviderStateMixin { +class _CameraExampleHomeState extends State with WidgetsBindingObserver, TickerProviderStateMixin { CameraController? controller; XFile? imageFile; XFile? videoFile; @@ -68,6 +68,7 @@ class _CameraExampleHomeState extends State double _maxAvailableZoom = 1.0; double _currentScale = 1.0; double _baseScale = 1.0; + double _lenPosition = 0.0; // Counting pointers (number of user fingers on screen) int _pointers = 0; @@ -151,6 +152,7 @@ class _CameraExampleHomeState extends State ), ), ), + _buildLensPositionSettings(), _captureControlRowWidget(), _modeControlRowWidget(), Padding( @@ -164,6 +166,37 @@ class _CameraExampleHomeState extends State ); } + Widget _buildLensPositionSettings() { + return ButtonBar( + layoutBehavior: ButtonBarLayoutBehavior.constrained, + alignment: MainAxisAlignment.center, + children: [ + ElevatedButton( + onPressed: () { + final camera = CameraPlatform.instance as AVFoundationCamera; + setState(() { + _lenPosition = (_lenPosition + 0.1).clamp(0.0, 1.0); + }); + camera.setFocusMode(controller!.cameraId, FocusMode.locked); + camera.setLensPosition(_lenPosition); + }, + child: const Text('Increment Lens Position'), + ), + ElevatedButton( + onPressed: () { + final camera = CameraPlatform.instance as AVFoundationCamera; + setState(() { + _lenPosition = (_lenPosition - 0.1).clamp(0.0, 1.0); + }); + camera.setFocusMode(controller!.cameraId, FocusMode.locked); + camera.setLensPosition(_lenPosition); + }, + child: const Text('Decrement Lens Position'), + ), + ], + ); + } + /// Display the preview from the camera (or a message if the preview is not available). Widget _cameraPreviewWidget() { final CameraController? cameraController = controller; @@ -281,9 +314,7 @@ class _CameraExampleHomeState extends State IconButton( icon: const Icon(Icons.exposure), color: Colors.blue, - onPressed: controller != null - ? 
onExposureModeButtonPressed - : null, + onPressed: controller != null ? onExposureModeButtonPressed : null, ), IconButton( icon: const Icon(Icons.filter_center_focus), @@ -306,9 +337,7 @@ class _CameraExampleHomeState extends State : Icons.screen_rotation, ), color: Colors.blue, - onPressed: controller != null - ? onCaptureOrientationLockButtonPressed - : null, + onPressed: controller != null ? onCaptureOrientationLockButtonPressed : null, ), ], ), @@ -328,39 +357,23 @@ class _CameraExampleHomeState extends State children: [ IconButton( icon: const Icon(Icons.flash_off), - color: controller?.value.flashMode == FlashMode.off - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? () => onSetFlashModeButtonPressed(FlashMode.off) - : null, + color: controller?.value.flashMode == FlashMode.off ? Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.off) : null, ), IconButton( icon: const Icon(Icons.flash_auto), - color: controller?.value.flashMode == FlashMode.auto - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? () => onSetFlashModeButtonPressed(FlashMode.auto) - : null, + color: controller?.value.flashMode == FlashMode.auto ? Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.auto) : null, ), IconButton( icon: const Icon(Icons.flash_on), - color: controller?.value.flashMode == FlashMode.always - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? () => onSetFlashModeButtonPressed(FlashMode.always) - : null, + color: controller?.value.flashMode == FlashMode.always ? Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.always) : null, ), IconButton( icon: const Icon(Icons.highlight), - color: controller?.value.flashMode == FlashMode.torch - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? 
() => onSetFlashModeButtonPressed(FlashMode.torch) - : null, + color: controller?.value.flashMode == FlashMode.torch ? Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.torch) : null, ), ], ), @@ -370,14 +383,10 @@ class _CameraExampleHomeState extends State Widget _exposureModeControlRowWidget() { final ButtonStyle styleAuto = TextButton.styleFrom( - foregroundColor: controller?.value.exposureMode == ExposureMode.auto - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.exposureMode == ExposureMode.auto ? Colors.orange : Colors.blue, ); final ButtonStyle styleLocked = TextButton.styleFrom( - foregroundColor: controller?.value.exposureMode == ExposureMode.locked - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.exposureMode == ExposureMode.locked ? Colors.orange : Colors.blue, ); return SizeTransition( @@ -419,9 +428,7 @@ class _CameraExampleHomeState extends State ), TextButton( style: styleLocked, - onPressed: controller != null - ? () => controller!.setExposureOffset(0.0) - : null, + onPressed: controller != null ? () => controller!.setExposureOffset(0.0) : null, child: const Text('RESET OFFSET'), ), ], @@ -454,14 +461,10 @@ class _CameraExampleHomeState extends State Widget _focusModeControlRowWidget() { final ButtonStyle styleAuto = TextButton.styleFrom( - foregroundColor: controller?.value.focusMode == FocusMode.auto - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.focusMode == FocusMode.auto ? Colors.orange : Colors.blue, ); final ButtonStyle styleLocked = TextButton.styleFrom( - foregroundColor: controller?.value.focusMode == FocusMode.locked - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.focusMode == FocusMode.locked ? Colors.orange : Colors.blue, ); return SizeTransition( @@ -477,9 +480,7 @@ class _CameraExampleHomeState extends State children: [ TextButton( style: styleAuto, - onPressed: controller != null - ? 
() => onSetFocusModeButtonPressed(FocusMode.auto) - : null, + onPressed: controller != null ? () => onSetFocusModeButtonPressed(FocusMode.auto) : null, onLongPress: () { if (controller != null) { CameraPlatform.instance.setFocusPoint( @@ -493,9 +494,7 @@ class _CameraExampleHomeState extends State ), TextButton( style: styleLocked, - onPressed: controller != null - ? () => onSetFocusModeButtonPressed(FocusMode.locked) - : null, + onPressed: controller != null ? () => onSetFocusModeButtonPressed(FocusMode.locked) : null, child: const Text('LOCKED'), ), ], diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift index d86662a9b8b3..c57ee1dfa569 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift @@ -27,6 +27,9 @@ protocol AssetWriterInput: NSObjectProtocol { var expectsMediaDataInRealTime: Bool { get set } var isReadyForMoreMediaData: Bool { get } + /// The transform to apply to the visual media data before writing it. 
+ var transform: CGAffineTransform { get set } + func append(_ sampleBuffer: CMSampleBuffer) -> Bool } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index 117fd909d32e..0dbd949aa786 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -95,6 +95,9 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, completion: @escaping (Result) -> Void ) + /// Sets the lens position to the given value in the (0,1) range and locks focus. + func setLensPosition(_ position: Float, completion: @escaping (Result) -> Void) + func setZoomLevel(_ zoom: CGFloat, withCompletion: @escaping (Result) -> Void) func setVideoStabilizationMode( @@ -111,6 +114,9 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, func pausePreview() func resumePreview() + /// Applies a geometric transform (rotation, mirroring, optional crop) to all camera outputs. 
+ func setTransform(_ transform: PlatformCameraTransform) + func setDescriptionWhileRecording( _ cameraName: String, withCompletion: @escaping (Result) -> Void diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index 43ef5f48916c..ff8d63609ba9 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -477,6 +477,12 @@ extension CameraPlugin: CameraApi { } } + func setLensPosition(position: Double, completion: @escaping (Result) -> Void) { + captureSessionQueue.async { [weak self] in + self?.camera?.setLensPosition(Float(position), completion: completion) + } + } + func getMinZoomLevel(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in if let minZoom = self?.camera?.minimumAvailableZoomFactor { @@ -555,4 +561,13 @@ extension CameraPlugin: CameraApi { completion(.success(())) } } + + func setTransform( + transform: PlatformCameraTransform, completion: @escaping (Result) -> Void + ) { + captureSessionQueue.async { [weak self] in + self?.camera?.setTransform(transform) + completion(.success(())) + } + } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift index d119afb9474a..74b297a75192 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift @@ -25,6 +25,14 @@ protocol CaptureConnection: NSObjectProtocol { /// 
Corresponds to the preferredVideoStabilizationMode property of `AVCaptureConnection` var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode { get set } + /// Corresponds to the `videoRotationAngle` property of `AVCaptureConnection` (iOS 17+). + @available(iOS 17.0, *) + var videoRotationAngle: CGFloat { get set } + + /// Corresponds to the `isVideoRotationAngleSupported(_:)` method of `AVCaptureConnection` (iOS 17+). + @available(iOS 17.0, *) + func isVideoRotationAngleSupported(_ angle: CGFloat) -> Bool + } extension AVCaptureConnection: CaptureConnection {} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift index b007cb39b4d1..a87c5b1a7ba8 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift @@ -37,6 +37,8 @@ protocol CaptureDevice: NSObjectProtocol { func isFocusModeSupported(_ mode: AVCaptureDevice.FocusMode) -> Bool var focusMode: AVCaptureDevice.FocusMode { get set } var focusPointOfInterest: CGPoint { get set } + var lensPosition: Float { get } + func setFocusModeLocked(lensPosition: Float, completionHandler handler: ((CMTime) -> Void)?) 
// Exposure var isExposurePointOfInterestSupported: Bool { get } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index 16d1637d23fa..3fa1af49d782 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -126,6 +126,12 @@ final class DefaultCamera: NSObject, Camera { private var focusMode = PlatformFocusMode.auto private var flashMode: PlatformFlashMode + /// The current camera transform applied to all outputs. + private var cameraTransform: PlatformCameraTransform? + + /// Metal-backed Core Image context, lazily initialised. Used only when a crop is active. + private lazy var ciContext = CIContext(options: [.useSoftwareRenderer: false]) + private static func pigeonErrorFromNSError(_ error: NSError) -> PigeonError { return PigeonError( code: "Error \(error.code)", @@ -752,7 +758,9 @@ final class DefaultCamera: NSObject, Camera { assert(path != nil, "Path must not be nil if no error.") completion(.success(path!)) } - } + }, + cropRect: cameraTransform?.cropRect, + ciContext: cameraTransform?.cropRect != nil ? ciContext : nil ) assert( @@ -796,6 +804,8 @@ final class DefaultCamera: NSObject, Camera { updateOrientation(orientation, forCaptureOutput: capturePhotoOutput) updateOrientation(orientation, forCaptureOutput: captureVideoOutput) + + applyConnectionTransform() } private func updateOrientation( @@ -808,6 +818,84 @@ final class DefaultCamera: NSObject, Camera { } } + // MARK: - Transform + + func setTransform(_ transform: PlatformCameraTransform) { + cameraTransform = transform + applyConnectionTransform() + } + + /// Applies the current rotation/mirror transform at the hardware connection level. 
+ /// + /// `AVCaptureConnection.videoRotationAngle` (iOS 17+) instructs the camera ISP to rotate + /// the pixel data in hardware – zero CPU/GPU cost – and the effect propagates to the + /// preview texture, image stream, video recording, and photo capture simultaneously. + private func applyConnectionTransform() { + guard let transform = cameraTransform else { return } + + for output in [captureVideoOutput as CaptureOutput, capturePhotoOutput as CaptureOutput] { + guard let connection = output.connection(with: .video) else { continue } + + if #available(iOS 17.0, *) { + // The iOS camera sensor's native orientation is landscape (0°). The + // system normally compensates with 90° to produce an upright portrait + // image. We offset the caller's angle by 90° so that + // rotationDegrees = 0 means "upright / no rotation" from the API + // user's point of view. + let angle = (transform.rotationDegrees + 90).truncatingRemainder(dividingBy: 360) + if connection.isVideoRotationAngleSupported(angle) { + connection.videoRotationAngle = angle + } + } + + // Vertical flip is implemented as a composition: mirror horizontally + rotate 180°. + let mirrorH = transform.flipHorizontally != transform.flipVertically + if connection.isVideoMirroringSupported { + connection.isVideoMirrored = mirrorH + } + } + } + + /// Crops `pixelBuffer` to the normalised rect from `transform.cropRect`. + /// + /// The crop is performed on the GPU via Metal-backed Core Image (`ciContext`). + /// Returns `nil` when allocation fails; callers should fall back to the original buffer. + private func applyCrop( + _ pixelBuffer: CVPixelBuffer, cropRect: PlatformRect + ) -> CVPixelBuffer? 
{ + let fullWidth = CVPixelBufferGetWidth(pixelBuffer) + let fullHeight = CVPixelBufferGetHeight(pixelBuffer) + + let cropX = cropRect.x * Double(fullWidth) + let cropY = cropRect.y * Double(fullHeight) + let cropW = cropRect.width * Double(fullWidth) + let cropH = cropRect.height * Double(fullHeight) + + // Core Image origin is bottom-left; convert from top-left. + let ciCropRect = CGRect( + x: cropX, + y: Double(fullHeight) - cropY - cropH, + width: cropW, + height: cropH) + + let ciImage = CIImage(cvPixelBuffer: pixelBuffer).cropped(to: ciCropRect) + .transformed(by: CGAffineTransform(translationX: -ciCropRect.origin.x, y: -ciCropRect.origin.y)) + + var outBuffer: CVPixelBuffer? + let attrs: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: videoFormat, + kCVPixelBufferWidthKey as String: Int(cropW), + kCVPixelBufferHeightKey as String: Int(cropH), + kCVPixelBufferIOSurfacePropertiesKey as String: [:], + ] + guard CVPixelBufferCreate(kCFAllocatorDefault, Int(cropW), Int(cropH), videoFormat, attrs as CFDictionary, &outBuffer) == kCVReturnSuccess, + let out = outBuffer + else { return nil } + + ciContext.render(ciImage, to: out) + return out + } + private func videoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation { @@ -928,6 +1016,25 @@ final class DefaultCamera: NSObject, Camera { completion(.success(())) } + func setLensPosition( + _ position: Float, completion: @escaping (Result) -> Void + ) { + guard position >= 0, position <= 1 else { + completion( + .failure( + PigeonError( + code: "LENS_POSITION_ERROR", + message: + "Lens position out of bounds (should be between 0.0 and 1.0).", + details: nil))) + return + } + try? 
captureDevice.lockForConfiguration() + captureDevice.setFocusModeLocked(lensPosition: position, completionHandler: nil) + captureDevice.unlockForConfiguration() + completion(.success(())) + } + private func applyFocusMode() { applyFocusMode(focusMode, onDevice: captureDevice) } @@ -1160,6 +1267,9 @@ final class DefaultCamera: NSObject, Camera { newConnection.videoOrientation = oldConnection.videoOrientation } + // Re-apply any camera transform that was set by the caller. + applyConnectionTransform() + // Add the new connections to the session. if !videoCaptureSession.canAddInput(captureVideoInput) { completion( @@ -1250,9 +1360,17 @@ final class DefaultCamera: NSObject, Camera { ) { if output == captureVideoOutput.avOutput { if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { + // Apply crop transform if one is active (GPU path via Core Image / Metal). + // When no crop is set this is a zero-overhead fast path. + let processedBuffer: CVPixelBuffer + if let cropRect = cameraTransform?.cropRect { + processedBuffer = applyCrop(newBuffer, cropRect: cropRect) ?? newBuffer + } else { + processedBuffer = newBuffer + } pixelBufferSynchronizationQueue.sync { - latestPixelBuffer = newBuffer + latestPixelBuffer = processedBuffer } onFrameAvailable?() @@ -1323,10 +1441,17 @@ final class DefaultCamera: NSObject, Camera { } if output == captureVideoOutput.avOutput { - let nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) + let rawBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)! let nextSampleTime = CMTimeSubtract(sampleTime, recordingTimeOffset) if nextSampleTime > lastAppendedVideoSampleTime { - let _ = videoAdaptor?.append(nextBuffer!, withPresentationTime: nextSampleTime) + // Apply crop transform to the recorded frame if needed. + let writeBuffer: CVPixelBuffer + if let cropRect = cameraTransform?.cropRect { + writeBuffer = applyCrop(rawBuffer, cropRect: cropRect) ?? 
rawBuffer + } else { + writeBuffer = rawBuffer + } + let _ = videoAdaptor?.append(writeBuffer, withPresentationTime: nextSampleTime) lastAppendedVideoSampleTime = nextSampleTime } } else { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift index 47e6abbbe750..f2b909166992 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift @@ -1,7 +1,7 @@ // Copyright 2013 The Flutter Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// Autogenerated from Pigeon (v26.1.5), do not edit directly. +// Autogenerated from Pigeon (v26.2.3), do not edit directly. // See also: https://pub.dev/packages/pigeon import Foundation @@ -59,9 +59,7 @@ private func wrapError(_ error: Any) -> [Any?] { } private func createConnectionError(withChannelName channelName: String) -> PigeonError { - return PigeonError( - code: "channel-error", message: "Unable to establish connection on channel: '\(channelName)'.", - details: "") + return PigeonError(code: "channel-error", message: "Unable to establish connection on channel: '\(channelName)'.", details: "") } private func isNullish(_ value: Any?) -> Bool { @@ -116,12 +114,12 @@ func deepEqualsMessages(_ lhs: Any?, _ rhs: Any?) -> Bool { func deepHashMessages(value: Any?, hasher: inout Hasher) { if let valueList = value as? [AnyHashable] { - for item in valueList { deepHashMessages(value: item, hasher: &hasher) } - return + for item in valueList { deepHashMessages(value: item, hasher: &hasher) } + return } if let valueDict = value as? 
[AnyHashable: AnyHashable] { - for key in valueDict.keys { + for key in valueDict.keys { hasher.combine(key) deepHashMessages(value: valueDict[key]!, hasher: &hasher) } @@ -135,6 +133,8 @@ func deepHashMessages(value: Any?, hasher: inout Hasher) { return hasher.combine(String(describing: value)) } + + enum PlatformCameraLensDirection: Int { /// Front facing camera (a user looking at the screen is seen by the camera). case front = 0 @@ -215,6 +215,7 @@ struct PlatformCameraDescription: Hashable { /// The type of the camera lens. var lensType: PlatformCameraLensType + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraDescription? { let name = pigeonVar_list[0] as! String @@ -235,8 +236,7 @@ struct PlatformCameraDescription: Hashable { ] } static func == (lhs: PlatformCameraDescription, rhs: PlatformCameraDescription) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -255,6 +255,7 @@ struct PlatformCameraState: Hashable { /// Whether setting focus points is supported. var focusPointSupported: Bool + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraState? { let previewSize = pigeonVar_list[0] as! 
PlatformSize @@ -281,8 +282,7 @@ struct PlatformCameraState: Hashable { ] } static func == (lhs: PlatformCameraState, rhs: PlatformCameraState) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -299,6 +299,7 @@ struct PlatformCameraImageData: Hashable { var sensorExposureTimeNanoseconds: Int64 var sensorSensitivity: Double + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraImageData? { let formatCode = pigeonVar_list[0] as! Int64 @@ -331,8 +332,7 @@ struct PlatformCameraImageData: Hashable { ] } static func == (lhs: PlatformCameraImageData, rhs: PlatformCameraImageData) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -345,6 +345,7 @@ struct PlatformCameraImagePlane: Hashable { var width: Int64 var height: Int64 + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraImagePlane? { let bytes = pigeonVar_list[0] as! FlutterStandardTypedData @@ -368,8 +369,7 @@ struct PlatformCameraImagePlane: Hashable { ] } static func == (lhs: PlatformCameraImagePlane, rhs: PlatformCameraImagePlane) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -383,6 +383,7 @@ struct PlatformMediaSettings: Hashable { var audioBitrate: Int64? = nil var enableAudio: Bool + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformMediaSettings? { let resolutionPreset = pigeonVar_list[0] as! 
PlatformResolutionPreset @@ -409,8 +410,7 @@ struct PlatformMediaSettings: Hashable { ] } static func == (lhs: PlatformMediaSettings, rhs: PlatformMediaSettings) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -421,6 +421,7 @@ struct PlatformPoint: Hashable { var x: Double var y: Double + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformPoint? { let x = pigeonVar_list[0] as! Double @@ -438,8 +439,7 @@ struct PlatformPoint: Hashable { ] } static func == (lhs: PlatformPoint, rhs: PlatformPoint) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -450,6 +450,7 @@ struct PlatformSize: Hashable { var width: Double var height: Double + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformSize? { let width = pigeonVar_list[0] as! Double @@ -467,8 +468,99 @@ struct PlatformSize: Hashable { ] } static func == (lhs: PlatformSize, rhs: PlatformSize) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) + return deepEqualsMessages(lhs.toList(), rhs.toList()) } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformRect: Hashable { + var x: Double + var y: Double + var width: Double + var height: Double + + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformRect? { + let x = pigeonVar_list[0] as! Double + let y = pigeonVar_list[1] as! Double + let width = pigeonVar_list[2] as! Double + let height = pigeonVar_list[3] as! 
Double + + return PlatformRect( + x: x, + y: y, + width: width, + height: height + ) + } + func toList() -> [Any?] { + return [ + x, + y, + width, + height, + ] } + static func == (lhs: PlatformRect, rhs: PlatformRect) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +/// Pigeon version of a geometric camera transform. +/// +/// Rotation and mirroring are applied at the hardware connection level +/// (AVCaptureConnection.videoRotationAngle / isVideoMirrored), which means no +/// CPU/GPU cost and the effect is visible in the preview, image stream, photos, +/// and recorded video simultaneously. +/// +/// Crop is applied per-frame via Core Image on the GPU (Metal) and has a small +/// (~1–3 ms) cost per frame. +/// +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformCameraTransform: Hashable { + /// Clockwise rotation in degrees. Must be 0, 90, 180, or 270. + var rotationDegrees: Double + /// Whether to flip the image along the horizontal axis (left–right mirror). + var flipHorizontally: Bool + /// Whether to flip the image along the vertical axis (upside-down mirror). + /// + /// Implemented as a 180° rotation composed with a horizontal flip. + var flipVertically: Bool + /// Optional crop rectangle in normalized (0,1) coordinate space. + /// + /// Applied after rotation/mirroring. Null means no crop. + var cropRect: PlatformRect? = nil + + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraTransform? { + let rotationDegrees = pigeonVar_list[0] as! Double + let flipHorizontally = pigeonVar_list[1] as! Bool + let flipVertically = pigeonVar_list[2] as! Bool + let cropRect: PlatformRect? 
= nilOrValue(pigeonVar_list[3]) + + return PlatformCameraTransform( + rotationDegrees: rotationDegrees, + flipHorizontally: flipHorizontally, + flipVertically: flipVertically, + cropRect: cropRect + ) + } + func toList() -> [Any?] { + return [ + rotationDegrees, + flipHorizontally, + flipVertically, + cropRect, + ] + } + static func == (lhs: PlatformCameraTransform, rhs: PlatformCameraTransform) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -551,6 +643,10 @@ private class MessagesPigeonCodecReader: FlutterStandardReader { return PlatformPoint.fromList(self.readValue() as! [Any?]) case 145: return PlatformSize.fromList(self.readValue() as! [Any?]) + case 146: + return PlatformRect.fromList(self.readValue() as! [Any?]) + case 147: + return PlatformCameraTransform.fromList(self.readValue() as! [Any?]) default: return super.readValue(ofType: type) } @@ -610,6 +706,12 @@ private class MessagesPigeonCodecWriter: FlutterStandardWriter { } else if let value = value as? PlatformSize { super.writeByte(145) super.writeValue(value.toList()) + } else if let value = value as? PlatformRect { + super.writeByte(146) + super.writeValue(value.toList()) + } else if let value = value as? PlatformCameraTransform { + super.writeByte(147) + super.writeValue(value.toList()) } else { super.writeValue(value) } @@ -630,22 +732,17 @@ class MessagesPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable { static let shared = MessagesPigeonCodec(readerWriter: MessagesPigeonCodecReaderWriter()) } -var messagesPigeonMethodCodec = FlutterStandardMethodCodec( - readerWriter: MessagesPigeonCodecReaderWriter()) +var messagesPigeonMethodCodec = FlutterStandardMethodCodec(readerWriter: MessagesPigeonCodecReaderWriter()); + /// Generated protocol from Pigeon that represents a handler of messages from Flutter. protocol CameraApi { /// Returns the list of available cameras. 
- func getAvailableCameras( - completion: @escaping (Result<[PlatformCameraDescription], Error>) -> Void) + func getAvailableCameras(completion: @escaping (Result<[PlatformCameraDescription], Error>) -> Void) /// Create a new camera with the given settings, and returns its ID. - func create( - cameraName: String, settings: PlatformMediaSettings, - completion: @escaping (Result) -> Void) + func create(cameraName: String, settings: PlatformMediaSettings, completion: @escaping (Result) -> Void) /// Initializes the camera with the given ID. - func initialize( - cameraId: Int64, imageFormat: PlatformImageFormatGroup, - completion: @escaping (Result) -> Void) + func initialize(cameraId: Int64, imageFormat: PlatformImageFormatGroup, completion: @escaping (Result) -> Void) /// Begins streaming frames from the camera. func startImageStream(completion: @escaping (Result) -> Void) /// Stops streaming frames from the camera. @@ -659,8 +756,7 @@ protocol CameraApi { /// and any associated resources can be cleaned up. func dispose(cameraId: Int64, completion: @escaping (Result) -> Void) /// Locks the camera capture to the current device orientation. - func lockCaptureOrientation( - orientation: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) + func lockCaptureOrientation(orientation: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) /// Unlocks camera capture orientation, allowing it to automatically adapt to /// device orientation. func unlockCaptureOrientation(completion: @escaping (Result) -> Void) @@ -681,12 +777,16 @@ protocol CameraApi { /// Switches the camera to the given flash mode. func setFlashMode(mode: PlatformFlashMode, completion: @escaping (Result) -> Void) /// Switches the camera to the given exposure mode. 
- func setExposureMode( - mode: PlatformExposureMode, completion: @escaping (Result) -> Void) + func setExposureMode(mode: PlatformExposureMode, completion: @escaping (Result) -> Void) /// Anchors auto-exposure to the given point in (0,1) coordinate space. /// /// A null value resets to the default exposure point. func setExposurePoint(point: PlatformPoint?, completion: @escaping (Result) -> Void) + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + func setLensPosition(position: Double, completion: @escaping (Result) -> Void) /// Returns the minimum exposure offset supported by the camera. func getMinExposureOffset(completion: @escaping (Result) -> Void) /// Returns the maximum exposure offset supported by the camera. @@ -706,11 +806,9 @@ protocol CameraApi { /// Sets the zoom factor. func setZoomLevel(zoom: Double, completion: @escaping (Result) -> Void) /// Sets the video stabilization mode. - func setVideoStabilizationMode( - mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) + func setVideoStabilizationMode(mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) /// Gets if the given video stabilization mode is supported. - func isVideoStabilizationModeSupported( - mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) + func isVideoStabilizationModeSupported(mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) /// Pauses streaming of preview frames. func pausePreview(completion: @escaping (Result) -> Void) /// Resumes a previously paused preview stream. @@ -718,25 +816,27 @@ protocol CameraApi { /// Changes the camera used while recording video. /// /// This should only be called while video recording is active. 
- func updateDescriptionWhileRecording( - cameraName: String, completion: @escaping (Result) -> Void) + func updateDescriptionWhileRecording(cameraName: String, completion: @escaping (Result) -> Void) /// Sets the file format used for taking pictures. - func setImageFileFormat( - format: PlatformImageFileFormat, completion: @escaping (Result) -> Void) + func setImageFileFormat(format: PlatformImageFileFormat, completion: @escaping (Result) -> Void) + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. + func setTransform(transform: PlatformCameraTransform, completion: @escaping (Result) -> Void) } /// Generated setup class from Pigeon to handle messages through the `binaryMessenger`. class CameraApiSetup { static var codec: FlutterStandardMessageCodec { MessagesPigeonCodec.shared } /// Sets up an instance of `CameraApi` to handle messages through the `binaryMessenger`. - static func setUp( - binaryMessenger: FlutterBinaryMessenger, api: CameraApi?, messageChannelSuffix: String = "" - ) { + static func setUp(binaryMessenger: FlutterBinaryMessenger, api: CameraApi?, messageChannelSuffix: String = "") { let channelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : "" /// Returns the list of available cameras. 
- let getAvailableCamerasChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getAvailableCameras\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getAvailableCamerasChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getAvailableCameras\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getAvailableCamerasChannel.setMessageHandler { _, reply in api.getAvailableCameras { result in @@ -752,9 +852,7 @@ class CameraApiSetup { getAvailableCamerasChannel.setMessageHandler(nil) } /// Create a new camera with the given settings, and returns its ID. - let createChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.create\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let createChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.create\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { createChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -773,9 +871,7 @@ class CameraApiSetup { createChannel.setMessageHandler(nil) } /// Initializes the camera with the given ID. - let initializeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let initializeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { initializeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -794,9 +890,7 @@ class CameraApiSetup { initializeChannel.setMessageHandler(nil) } /// Begins streaming frames from the camera. 
- let startImageStreamChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let startImageStreamChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { startImageStreamChannel.setMessageHandler { _, reply in api.startImageStream { result in @@ -812,9 +906,7 @@ class CameraApiSetup { startImageStreamChannel.setMessageHandler(nil) } /// Stops streaming frames from the camera. - let stopImageStreamChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let stopImageStreamChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { stopImageStreamChannel.setMessageHandler { _, reply in api.stopImageStream { result in @@ -833,10 +925,7 @@ class CameraApiSetup { /// frame sent. /// /// This is used to throttle sending frames across the channel. 
- let receivedImageStreamDataChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let receivedImageStreamDataChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { receivedImageStreamDataChannel.setMessageHandler { _, reply in api.receivedImageStreamData { result in @@ -853,9 +942,7 @@ class CameraApiSetup { } /// Indicates that the given camera is no longer being used on the Dart side, /// and any associated resources can be cleaned up. - let disposeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let disposeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { disposeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -873,10 +960,7 @@ class CameraApiSetup { disposeChannel.setMessageHandler(nil) } /// Locks the camera capture to the current device orientation. - let lockCaptureOrientationChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let lockCaptureOrientationChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { lockCaptureOrientationChannel.setMessageHandler { message, reply in let args = message as! [Any?] 
@@ -895,10 +979,7 @@ class CameraApiSetup { } /// Unlocks camera capture orientation, allowing it to automatically adapt to /// device orientation. - let unlockCaptureOrientationChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let unlockCaptureOrientationChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { unlockCaptureOrientationChannel.setMessageHandler { _, reply in api.unlockCaptureOrientation { result in @@ -915,9 +996,7 @@ class CameraApiSetup { } /// Takes a picture with the current settings, and returns the path to the /// resulting file. - let takePictureChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let takePictureChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { takePictureChannel.setMessageHandler { _, reply in api.takePicture { result in @@ -933,10 +1012,7 @@ class CameraApiSetup { takePictureChannel.setMessageHandler(nil) } /// Does any preprocessing necessary before beginning to record video. 
- let prepareForVideoRecordingChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let prepareForVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { prepareForVideoRecordingChannel.setMessageHandler { _, reply in api.prepareForVideoRecording { result in @@ -953,9 +1029,7 @@ class CameraApiSetup { } /// Begins recording video, optionally enabling streaming to Dart at the same /// time. - let startVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let startVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { startVideoRecordingChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -973,9 +1047,7 @@ class CameraApiSetup { startVideoRecordingChannel.setMessageHandler(nil) } /// Stops recording video, and results the path to the resulting file. 
- let stopVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let stopVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { stopVideoRecordingChannel.setMessageHandler { _, reply in api.stopVideoRecording { result in @@ -991,9 +1063,7 @@ class CameraApiSetup { stopVideoRecordingChannel.setMessageHandler(nil) } /// Pauses video recording. - let pauseVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let pauseVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { pauseVideoRecordingChannel.setMessageHandler { _, reply in api.pauseVideoRecording { result in @@ -1009,9 +1079,7 @@ class CameraApiSetup { pauseVideoRecordingChannel.setMessageHandler(nil) } /// Resumes a previously paused video recording. - let resumeVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let resumeVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { resumeVideoRecordingChannel.setMessageHandler { _, reply in api.resumeVideoRecording { result in @@ -1027,9 +1095,7 @@ class CameraApiSetup { resumeVideoRecordingChannel.setMessageHandler(nil) } /// Switches the camera to the given flash mode. 
- let setFlashModeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setFlashModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setFlashModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1047,9 +1113,7 @@ class CameraApiSetup { setFlashModeChannel.setMessageHandler(nil) } /// Switches the camera to the given exposure mode. - let setExposureModeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setExposureModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setExposureModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1069,9 +1133,7 @@ class CameraApiSetup { /// Anchors auto-exposure to the given point in (0,1) coordinate space. /// /// A null value resets to the default exposure point. - let setExposurePointChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setExposurePointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setExposurePointChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1088,10 +1150,29 @@ class CameraApiSetup { } else { setExposurePointChannel.setMessageHandler(nil) } + /// Sets the lens position manually to the given value. 
+ /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + let setLensPositionChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setLensPosition\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setLensPositionChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let positionArg = args[0] as! Double + api.setLensPosition(position: positionArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setLensPositionChannel.setMessageHandler(nil) + } /// Returns the minimum exposure offset supported by the camera. - let getMinExposureOffsetChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMinExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMinExposureOffsetChannel.setMessageHandler { _, reply in api.getMinExposureOffset { result in @@ -1107,9 +1188,7 @@ class CameraApiSetup { getMinExposureOffsetChannel.setMessageHandler(nil) } /// Returns the maximum exposure offset supported by the camera. 
- let getMaxExposureOffsetChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMaxExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMaxExposureOffsetChannel.setMessageHandler { _, reply in api.getMaxExposureOffset { result in @@ -1125,9 +1204,7 @@ class CameraApiSetup { getMaxExposureOffsetChannel.setMessageHandler(nil) } /// Sets the exposure offset manually to the given value. - let setExposureOffsetChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setExposureOffsetChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1145,9 +1222,7 @@ class CameraApiSetup { setExposureOffsetChannel.setMessageHandler(nil) } /// Switches the camera to the given focus mode. - let setFocusModeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setFocusModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setFocusModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1167,9 +1242,7 @@ class CameraApiSetup { /// Anchors auto-focus to the given point in (0,1) coordinate space. 
/// /// A null value resets to the default focus point. - let setFocusPointChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setFocusPointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setFocusPointChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1187,9 +1260,7 @@ class CameraApiSetup { setFocusPointChannel.setMessageHandler(nil) } /// Returns the minimum zoom level supported by the camera. - let getMinZoomLevelChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMinZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMinZoomLevelChannel.setMessageHandler { _, reply in api.getMinZoomLevel { result in @@ -1205,9 +1276,7 @@ class CameraApiSetup { getMinZoomLevelChannel.setMessageHandler(nil) } /// Returns the maximum zoom level supported by the camera. - let getMaxZoomLevelChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMaxZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMaxZoomLevelChannel.setMessageHandler { _, reply in api.getMaxZoomLevel { result in @@ -1223,9 +1292,7 @@ class CameraApiSetup { getMaxZoomLevelChannel.setMessageHandler(nil) } /// Sets the zoom factor. 
- let setZoomLevelChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setZoomLevelChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1243,10 +1310,7 @@ class CameraApiSetup { setZoomLevelChannel.setMessageHandler(nil) } /// Sets the video stabilization mode. - let setVideoStabilizationModeChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.setVideoStabilizationMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setVideoStabilizationModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setVideoStabilizationMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setVideoStabilizationModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1264,10 +1328,7 @@ class CameraApiSetup { setVideoStabilizationModeChannel.setMessageHandler(nil) } /// Gets if the given video stabilization mode is supported. - let isVideoStabilizationModeSupportedChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.isVideoStabilizationModeSupported\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let isVideoStabilizationModeSupportedChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.isVideoStabilizationModeSupported\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { isVideoStabilizationModeSupportedChannel.setMessageHandler { message, reply in let args = message as! [Any?] 
@@ -1285,9 +1346,7 @@ class CameraApiSetup { isVideoStabilizationModeSupportedChannel.setMessageHandler(nil) } /// Pauses streaming of preview frames. - let pausePreviewChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let pausePreviewChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { pausePreviewChannel.setMessageHandler { _, reply in api.pausePreview { result in @@ -1303,9 +1362,7 @@ class CameraApiSetup { pausePreviewChannel.setMessageHandler(nil) } /// Resumes a previously paused preview stream. - let resumePreviewChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let resumePreviewChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { resumePreviewChannel.setMessageHandler { _, reply in api.resumePreview { result in @@ -1323,10 +1380,7 @@ class CameraApiSetup { /// Changes the camera used while recording video. /// /// This should only be called while video recording is active. 
- let updateDescriptionWhileRecordingChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let updateDescriptionWhileRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { updateDescriptionWhileRecordingChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1344,9 +1398,7 @@ class CameraApiSetup { updateDescriptionWhileRecordingChannel.setMessageHandler(nil) } /// Sets the file format used for taking pictures. - let setImageFileFormatChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setImageFileFormatChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setImageFileFormatChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1363,6 +1415,30 @@ class CameraApiSetup { } else { setImageFileFormatChannel.setMessageHandler(nil) } + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. 
+ let setTransformChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setTransform\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setTransformChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let transformArg = args[0] as! PlatformCameraTransform + api.setTransform(transform: transformArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setTransformChannel.setMessageHandler(nil) + } } } @@ -1416,31 +1492,25 @@ class PigeonEventSink { } class ImageDataStreamStreamHandler: PigeonEventChannelWrapper { - static func register( - with messenger: FlutterBinaryMessenger, - instanceName: String = "", - streamHandler: ImageDataStreamStreamHandler - ) { - var channelName = - "dev.flutter.pigeon.camera_avfoundation.CameraImageStreamEventApi.imageDataStream" + static func register(with messenger: FlutterBinaryMessenger, + instanceName: String = "", + streamHandler: ImageDataStreamStreamHandler) { + var channelName = "dev.flutter.pigeon.camera_avfoundation.CameraImageStreamEventApi.imageDataStream" if !instanceName.isEmpty { channelName += ".\(instanceName)" } let internalStreamHandler = PigeonStreamHandler(wrapper: streamHandler) - let channel = FlutterEventChannel( - name: channelName, binaryMessenger: messenger, codec: messagesPigeonMethodCodec) + let channel = FlutterEventChannel(name: channelName, binaryMessenger: messenger, codec: messagesPigeonMethodCodec) channel.setStreamHandler(internalStreamHandler) } } - + /// Handler for native callbacks that are not tied to a specific camera ID. /// /// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift. protocol CameraGlobalEventApiProtocol { /// Called when the device's physical orientation changes. 
- func deviceOrientationChanged( - orientation orientationArg: PlatformDeviceOrientation, - completion: @escaping (Result) -> Void) + func deviceOrientationChanged(orientation orientationArg: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) } class CameraGlobalEventApi: CameraGlobalEventApiProtocol { private let binaryMessenger: FlutterBinaryMessenger @@ -1453,14 +1523,9 @@ class CameraGlobalEventApi: CameraGlobalEventApiProtocol { return MessagesPigeonCodec.shared } /// Called when the device's physical orientation changes. - func deviceOrientationChanged( - orientation orientationArg: PlatformDeviceOrientation, - completion: @escaping (Result) -> Void - ) { - let channelName: String = - "dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged\(messageChannelSuffix)" - let channel = FlutterBasicMessageChannel( - name: channelName, binaryMessenger: binaryMessenger, codec: codec) + func deviceOrientationChanged(orientation orientationArg: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) { + let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec) channel.sendMessage([orientationArg] as [Any?]) { response in guard let listResponse = response as? [Any?] else { completion(.failure(createConnectionError(withChannelName: channelName))) @@ -1484,9 +1549,7 @@ class CameraGlobalEventApi: CameraGlobalEventApiProtocol { /// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift. protocol CameraEventApiProtocol { /// Called when the camera is inialitized for use. 
- func initialized( - initialState initialStateArg: PlatformCameraState, - completion: @escaping (Result) -> Void) + func initialized(initialState initialStateArg: PlatformCameraState, completion: @escaping (Result) -> Void) /// Called when an error occurs in the camera. /// /// This should be used for errors that occur outside of the context of @@ -1504,14 +1567,9 @@ class CameraEventApi: CameraEventApiProtocol { return MessagesPigeonCodec.shared } /// Called when the camera is inialitized for use. - func initialized( - initialState initialStateArg: PlatformCameraState, - completion: @escaping (Result) -> Void - ) { - let channelName: String = - "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized\(messageChannelSuffix)" - let channel = FlutterBasicMessageChannel( - name: channelName, binaryMessenger: binaryMessenger, codec: codec) + func initialized(initialState initialStateArg: PlatformCameraState, completion: @escaping (Result) -> Void) { + let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec) channel.sendMessage([initialStateArg] as [Any?]) { response in guard let listResponse = response as? [Any?] else { completion(.failure(createConnectionError(withChannelName: channelName))) @@ -1531,12 +1589,9 @@ class CameraEventApi: CameraEventApiProtocol { /// /// This should be used for errors that occur outside of the context of /// handling a specific HostApi call, such as during streaming. 
- func error(message messageArg: String, completion: @escaping (Result) -> Void) - { - let channelName: String = - "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error\(messageChannelSuffix)" - let channel = FlutterBasicMessageChannel( - name: channelName, binaryMessenger: binaryMessenger, codec: codec) + func error(message messageArg: String, completion: @escaping (Result) -> Void) { + let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec) channel.sendMessage([messageArg] as [Any?]) { response in guard let listResponse = response as? [Any?] else { completion(.failure(createConnectionError(withChannelName: channelName))) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift index 35050120e118..824cd4dd37f6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift @@ -3,6 +3,7 @@ // found in the LICENSE file. import AVFoundation +import CoreImage import Flutter import Foundation @@ -25,6 +26,13 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate { /// The completion handler block for capture and save photo operations. let completionHandler: SavePhotoDelegateCompletionHandler + /// Optional crop rectangle in normalised (0,1) coordinate space. + /// When non-nil the photo is cropped (GPU path) before it is written to disk. + private let cropRect: PlatformRect? + + /// Core Image context shared with the camera (Metal-backed). Only used when `cropRect` is set. + private let ciContext: CIContext? 
+ /// The path for captured photo file. /// Exposed for unit tests to verify the captured photo file path. var filePath: String { @@ -36,14 +44,20 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate { /// ioQueue - the queue on which captured photos are written to disk. /// completionHandler - The completion handler block for save photo operations. Can /// be called from either main queue or IO queue. + /// cropRect - optional crop in normalised (0,1) coordinates; applied before writing. + /// ciContext - Core Image context to use for crop rendering; must be non-nil when cropRect is set. init( path: String, ioQueue: DispatchQueue, - completionHandler: @escaping SavePhotoDelegateCompletionHandler + completionHandler: @escaping SavePhotoDelegateCompletionHandler, + cropRect: PlatformRect? = nil, + ciContext: CIContext? = nil ) { self.path = path self.ioQueue = ioQueue self.completionHandler = completionHandler + self.cropRect = cropRect + self.ciContext = ciContext super.init() } @@ -65,7 +79,42 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate { do { let data = photoDataProvider() - try data?.writeToPath(strongSelf.path, options: .atomic) + let finalData: WritableData? + + // If a crop is requested, apply it in Core Image before writing. + if let crop = strongSelf.cropRect, + let ctx = strongSelf.ciContext, + let rawData = data as? Data + { + let ci = CIImage(data: rawData) + let fullW = ci.map { Double($0.extent.width) } ?? 0 + let fullH = ci.map { Double($0.extent.height) } ?? 0 + if let ci = ci, fullW > 0, fullH > 0 { + // Core Image origin is bottom-left; convert from top-left. 
+ let ciCrop = CGRect( + x: crop.x * fullW, + y: (1.0 - crop.y - crop.height) * fullH, + width: crop.width * fullW, + height: crop.height * fullH) + let cropped = ci.cropped(to: ciCrop) + .transformed( + by: CGAffineTransform(translationX: -ciCrop.origin.x, y: -ciCrop.origin.y)) + if let encoded = ctx.jpegRepresentation( + of: cropped, + colorSpace: CGColorSpaceCreateDeviceRGB()) + { + finalData = encoded + } else { + finalData = data + } + } else { + finalData = data + } + } else { + finalData = data + } + + try finalData?.writeToPath(strongSelf.path, options: .atomic) strongSelf.completionHandler(strongSelf.path, nil) } catch { strongSelf.completionHandler(nil, error) diff --git a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart index e6f7340ed7a4..da12c0d6ec34 100644 --- a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart +++ b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart @@ -3,3 +3,4 @@ // found in the LICENSE file. export 'src/avfoundation_camera.dart'; +export 'src/camera_transform.dart'; diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart index 3907ed89219b..8dd7019596b3 100644 --- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart +++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart @@ -11,6 +11,7 @@ import 'package:flutter/services.dart'; import 'package:flutter/widgets.dart'; import 'package:stream_transform/stream_transform.dart'; +import 'camera_transform.dart'; import 'messages.g.dart'; import 'type_conversion.dart'; import 'utils.dart'; @@ -191,6 +192,14 @@ class AVFoundationCamera extends CameraPlatform { await _hostApi.unlockCaptureOrientation(); } + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. 
+ /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + Future setLensPosition(double position) async { + await _hostApi.setLensPosition(position); + } + @override Future takePicture(int cameraId) async { final String path = await _hostApi.takePicture(); @@ -442,6 +451,33 @@ class AVFoundationCamera extends CameraPlatform { await _hostApi.setImageFileFormat(_pigeonImageFileFormat(format)); } + /// Applies a geometric [transform] to all camera outputs on iOS. + /// + /// The [cameraId] parameter is currently unused on iOS (there is only ever + /// one active camera) but is included for API consistency. + /// + /// - Rotation and mirroring are applied at the hardware AVCaptureConnection + /// level (requires iOS 17+) and cost nothing in CPU / GPU. + /// - Crop is applied per-frame by Core Image on the GPU and costs ~1–3 ms + /// per frame. Pass `null` (or omit `cropRect`) to disable it. + Future setTransform(int cameraId, CameraTransform transform) async { + await _hostApi.setTransform( + PlatformCameraTransform( + rotationDegrees: transform.rotationDegrees, + flipHorizontally: transform.flipHorizontally, + flipVertically: transform.flipVertically, + cropRect: transform.cropRect == null + ? null + : PlatformRect( + x: transform.cropRect!.x, + y: transform.cropRect!.y, + width: transform.cropRect!.width, + height: transform.cropRect!.height, + ), + ), + ); + } + @override Widget buildPreview(int cameraId) { return Texture(textureId: cameraId); diff --git a/packages/camera/camera_avfoundation/lib/src/camera_transform.dart b/packages/camera/camera_avfoundation/lib/src/camera_transform.dart new file mode 100644 index 000000000000..6c3e26cc3c0b --- /dev/null +++ b/packages/camera/camera_avfoundation/lib/src/camera_transform.dart @@ -0,0 +1,76 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +/// A normalized rectangle within the (0,1) coordinate space used to describe +/// a crop region for [CameraTransform]. +/// +/// The origin (0,0) is the top-left corner of the image. +class CameraTransformRect { + /// Creates a normalized crop rectangle. + const CameraTransformRect({ + required this.x, + required this.y, + required this.width, + required this.height, + }) : assert(x >= 0 && x <= 1, 'x must be in [0, 1]'), + assert(y >= 0 && y <= 1, 'y must be in [0, 1]'), + assert(width > 0 && width <= 1, 'width must be in (0, 1]'), + assert(height > 0 && height <= 1, 'height must be in (0, 1]'), + assert(x + width <= 1, 'x + width must be <= 1'), + assert(y + height <= 1, 'y + height must be <= 1'); + + /// Left edge in normalized [0,1] coordinates. + final double x; + + /// Top edge in normalized [0,1] coordinates. + final double y; + + /// Width in normalized [0,1] coordinates. + final double width; + + /// Height in normalized [0,1] coordinates. + final double height; +} + +/// A geometric transform to apply to all camera outputs simultaneously: +/// the preview texture, the image stream, captured photos, and recorded video. +/// +/// On iOS 17+ rotation and mirroring are applied at the hardware +/// `AVCaptureConnection` level (zero CPU / GPU cost). Crop uses Core Image on +/// the GPU (~1–3 ms per frame). +class CameraTransform { + /// Creates a camera transform. + /// + /// Defaults to identity (no rotation, no flip, no crop). + const CameraTransform({ + this.rotationDegrees = 0, + this.flipHorizontally = false, + this.flipVertically = false, + this.cropRect, + }) : assert( + rotationDegrees == 0 || + rotationDegrees == 90 || + rotationDegrees == 180 || + rotationDegrees == 270, + 'rotationDegrees must be 0, 90, 180, or 270', + ); + + /// Clockwise rotation in degrees. + /// + /// Must be one of: `0`, `90`, `180`, `270`. + final double rotationDegrees; + + /// Flip the image left–right (horizontal mirror). 
+ final bool flipHorizontally; + + /// Flip the image upside-down (vertical mirror). + /// + /// Implemented as a horizontal flip composed with a 180° rotation. + final bool flipVertically; + + /// Optional crop region in normalized (0,1) coordinate space. + /// + /// Applied after rotation and mirroring. `null` means no crop. + final CameraTransformRect? cropRect; +} diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart index 46c94d58f8a1..4df80c329803 100644 --- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart +++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart @@ -1,21 +1,40 @@ // Copyright 2013 The Flutter Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// Autogenerated from Pigeon (v26.1.5), do not edit directly. +// Autogenerated from Pigeon (v26.2.3), do not edit directly. // See also: https://pub.dev/packages/pigeon -// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, omit_obvious_local_variable_types, unused_shown_name, unnecessary_import, no_leading_underscores_for_local_identifiers +// ignore_for_file: unused_import, unused_shown_name +// ignore_for_file: type=lint import 'dart:async'; -import 'dart:typed_data' show Float64List, Int32List, Int64List, Uint8List; +import 'dart:typed_data' show Float64List, Int32List, Int64List; -import 'package:flutter/foundation.dart' show ReadBuffer, WriteBuffer; import 'package:flutter/services.dart'; +import 'package:meta/meta.dart' show immutable, protected, visibleForTesting; -PlatformException _createConnectionError(String channelName) { - return PlatformException( - code: 'channel-error', - message: 'Unable to establish connection on channel: "$channelName".', - ); +Object? _extractReplyValueOrThrow( + List? 
replyList, + String channelName, { + required bool isNullValid, +}) { + if (replyList == null) { + throw PlatformException( + code: 'channel-error', + message: 'Unable to establish connection on channel: "$channelName".', + ); + } else if (replyList.length > 1) { + throw PlatformException( + code: replyList[0]! as String, + message: replyList[1] as String?, + details: replyList[2], + ); + } else if (!isNullValid && (replyList.isNotEmpty && replyList[0] == null)) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } + return replyList.firstOrNull; } List wrapResponse({ @@ -267,7 +286,7 @@ class PlatformCameraImageData { formatCode: result[0]! as int, width: result[1]! as int, height: result[2]! as int, - planes: (result[3] as List?)!.cast(), + planes: (result[3]! as List).cast(), lensAperture: result[4]! as double, sensorExposureTimeNanoseconds: result[5]! as int, sensorSensitivity: result[6]! as double, @@ -481,6 +500,130 @@ class PlatformSize { int get hashCode => Object.hashAll(_toList()); } +class PlatformRect { + PlatformRect({ + required this.x, + required this.y, + required this.width, + required this.height, + }); + + double x; + + double y; + + double width; + + double height; + + List _toList() { + return [x, y, width, height]; + } + + Object encode() { + return _toList(); + } + + static PlatformRect decode(Object result) { + result as List; + return PlatformRect( + x: result[0]! as double, + y: result[1]! as double, + width: result[2]! as double, + height: result[3]! as double, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! 
PlatformRect || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Pigeon version of a geometric camera transform. +/// +/// Rotation and mirroring are applied at the hardware connection level +/// (AVCaptureConnection.videoRotationAngle / isVideoMirrored), which means no +/// CPU/GPU cost and the effect is visible in the preview, image stream, photos, +/// and recorded video simultaneously. +/// +/// Crop is applied per-frame via Core Image on the GPU (Metal) and has a small +/// (~1–3 ms) cost per frame. +class PlatformCameraTransform { + PlatformCameraTransform({ + required this.rotationDegrees, + required this.flipHorizontally, + required this.flipVertically, + this.cropRect, + }); + + /// Clockwise rotation in degrees. Must be 0, 90, 180, or 270. + double rotationDegrees; + + /// Whether to flip the image along the horizontal axis (left–right mirror). + bool flipHorizontally; + + /// Whether to flip the image along the vertical axis (upside-down mirror). + /// + /// Implemented as a 180° rotation composed with a horizontal flip. + bool flipVertically; + + /// Optional crop rectangle in normalized (0,1) coordinate space. + /// + /// Applied after rotation/mirroring. Null means no crop. + PlatformRect? cropRect; + + List _toList() { + return [ + rotationDegrees, + flipHorizontally, + flipVertically, + cropRect, + ]; + } + + Object encode() { + return _toList(); + } + + static PlatformCameraTransform decode(Object result) { + result as List; + return PlatformCameraTransform( + rotationDegrees: result[0]! as double, + flipHorizontally: result[1]! as bool, + flipVertically: result[2]! 
as bool, + cropRect: result[3] as PlatformRect?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! PlatformCameraTransform || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -539,6 +682,12 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is PlatformSize) { buffer.putUint8(145); writeValue(buffer, value.encode()); + } else if (value is PlatformRect) { + buffer.putUint8(146); + writeValue(buffer, value.encode()); + } else if (value is PlatformCameraTransform) { + buffer.putUint8(147); + writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); } @@ -593,6 +742,10 @@ class _PigeonCodec extends StandardMessageCodec { return PlatformPoint.decode(readValue(buffer)!); case 145: return PlatformSize.decode(readValue(buffer)!); + case 146: + return PlatformRect.decode(readValue(buffer)!); + case 147: + return PlatformCameraTransform.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); } @@ -631,23 +784,14 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as List?)! - .cast(); - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return (pigeonVar_replyValue! as List) + .cast(); } /// Create a new camera with the given settings, and returns its ID. @@ -663,22 +807,13 @@ class CameraApi { [cameraName, settings], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as int?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as int; } /// Initializes the camera with the given ID. @@ -697,17 +832,12 @@ class CameraApi { [cameraId, imageFormat], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Begins streaming frames from the camera. @@ -721,17 +851,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Stops streaming frames from the camera. @@ -745,17 +870,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Called by the Dart side of the plugin when it has received the last image @@ -772,17 +892,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Indicates that the given camera is no longer being used on the Dart side, @@ -799,17 +914,12 @@ class CameraApi { [cameraId], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Locks the camera capture to the current device orientation. @@ -827,17 +937,12 @@ class CameraApi { [orientation], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Unlocks camera capture orientation, allowing it to automatically adapt to @@ -852,17 +957,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Takes a picture with the current settings, and returns the path to the @@ -877,22 +977,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as String?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as String; } /// Does any preprocessing necessary before beginning to record video. @@ -906,17 +997,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Begins recording video, optionally enabling streaming to Dart at the same @@ -933,17 +1019,12 @@ class CameraApi { [enableStream], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Stops recording video, and results the path to the resulting file. @@ -957,22 +1038,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as String?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as String; } /// Pauses video recording. 
@@ -986,17 +1058,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Resumes a previously paused video recording. @@ -1010,17 +1077,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Switches the camera to the given flash mode. @@ -1036,17 +1098,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Switches the camera to the given exposure mode. 
@@ -1062,17 +1119,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Anchors auto-exposure to the given point in (0,1) coordinate space. @@ -1090,17 +1142,36 @@ class CameraApi { [point], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); + } + + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. 
+ Future setLensPosition(double position) async { + final pigeonVar_channelName = + 'dev.flutter.pigeon.camera_avfoundation.CameraApi.setLensPosition$pigeonVar_messageChannelSuffix'; + final pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [position], + ); + final pigeonVar_replyList = await pigeonVar_sendFuture as List?; + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Returns the minimum exposure offset supported by the camera. @@ -1114,22 +1185,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Returns the maximum exposure offset supported by the camera. @@ -1143,22 +1205,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Sets the exposure offset manually to the given value. @@ -1174,17 +1227,12 @@ class CameraApi { [offset], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Switches the camera to the given focus mode. @@ -1200,17 +1248,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Anchors auto-focus to the given point in (0,1) coordinate space. 
@@ -1228,17 +1271,12 @@ class CameraApi { [point], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Returns the minimum zoom level supported by the camera. @@ -1252,22 +1290,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Returns the maximum zoom level supported by the camera. @@ -1281,22 +1310,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Sets the zoom factor. @@ -1312,17 +1332,12 @@ class CameraApi { [zoom], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Sets the video stabilization mode. @@ -1340,17 +1355,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Gets if the given video stabilization mode is supported. 
@@ -1368,22 +1378,13 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as bool?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as bool; } /// Pauses streaming of preview frames. @@ -1397,17 +1398,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Resumes a previously paused preview stream. @@ -1421,17 +1417,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Changes the camera used while recording video. @@ -1449,17 +1440,12 @@ class CameraApi { [cameraName], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Sets the file format used for taking pictures. @@ -1475,17 +1461,39 @@ class CameraApi { [format], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); + } + + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. 
+ Future setTransform(PlatformCameraTransform transform) async { + final pigeonVar_channelName = + 'dev.flutter.pigeon.camera_avfoundation.CameraApi.setTransform$pigeonVar_messageChannelSuffix'; + final pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [transform], + ); + final pigeonVar_replyList = await pigeonVar_sendFuture as List?; + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } } @@ -1527,19 +1535,11 @@ abstract class CameraGlobalEventApi { pigeonVar_channel.setMessageHandler(null); } else { pigeonVar_channel.setMessageHandler((Object? message) async { - assert( - message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged was null.', - ); - final List args = (message as List?)!; - final PlatformDeviceOrientation? arg_orientation = - (args[0] as PlatformDeviceOrientation?); - assert( - arg_orientation != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged was null, expected non-null PlatformDeviceOrientation.', - ); + final List args = message! as List; + final PlatformDeviceOrientation arg_orientation = + args[0]! as PlatformDeviceOrientation; try { - api.deviceOrientationChanged(arg_orientation!); + api.deviceOrientationChanged(arg_orientation); return wrapResponse(empty: true); } on PlatformException catch (e) { return wrapResponse(error: e); @@ -1587,19 +1587,11 @@ abstract class CameraEventApi { pigeonVar_channel.setMessageHandler(null); } else { pigeonVar_channel.setMessageHandler((Object? message) async { - assert( - message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized was null.', - ); - final List args = (message as List?)!; - final PlatformCameraState? 
arg_initialState = - (args[0] as PlatformCameraState?); - assert( - arg_initialState != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized was null, expected non-null PlatformCameraState.', - ); + final List args = message! as List; + final PlatformCameraState arg_initialState = + args[0]! as PlatformCameraState; try { - api.initialized(arg_initialState!); + api.initialized(arg_initialState); return wrapResponse(empty: true); } on PlatformException catch (e) { return wrapResponse(error: e); @@ -1621,18 +1613,10 @@ abstract class CameraEventApi { pigeonVar_channel.setMessageHandler(null); } else { pigeonVar_channel.setMessageHandler((Object? message) async { - assert( - message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error was null.', - ); - final List args = (message as List?)!; - final String? arg_message = (args[0] as String?); - assert( - arg_message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error was null, expected non-null String.', - ); + final List args = message! as List; + final String arg_message = args[0]! 
as String; try { - api.error(arg_message!); + api.error(arg_message); return wrapResponse(empty: true); } on PlatformException catch (e) { return wrapResponse(error: e); diff --git a/packages/camera/camera_avfoundation/pigeons/messages.dart b/packages/camera/camera_avfoundation/pigeons/messages.dart index 29ed9ed4b6b9..a8cb3940d5e0 100644 --- a/packages/camera/camera_avfoundation/pigeons/messages.dart +++ b/packages/camera/camera_avfoundation/pigeons/messages.dart @@ -9,6 +9,10 @@ import 'package:pigeon/pigeon.dart'; dartOut: 'lib/src/messages.g.dart', swiftOut: 'ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift', + objcOptions: ObjcOptions( + prefix: 'FCP', + headerIncludePath: './include/camera_avfoundation/messages.g.h', + ), copyrightHeader: 'pigeons/copyright.txt', ), ) @@ -187,6 +191,55 @@ class PlatformSize { final double height; } +// Pigeon equivalent of CGRect, with values in the (0,1) normalized coordinate space. +class PlatformRect { + PlatformRect({ + required this.x, + required this.y, + required this.width, + required this.height, + }); + + final double x; + final double y; + final double width; + final double height; +} + +/// Pigeon version of a geometric camera transform. +/// +/// Rotation and mirroring are applied at the hardware connection level +/// (AVCaptureConnection.videoRotationAngle / isVideoMirrored), which means no +/// CPU/GPU cost and the effect is visible in the preview, image stream, photos, +/// and recorded video simultaneously. +/// +/// Crop is applied per-frame via Core Image on the GPU (Metal) and has a small +/// (~1–3 ms) cost per frame. +class PlatformCameraTransform { + PlatformCameraTransform({ + required this.rotationDegrees, + required this.flipHorizontally, + required this.flipVertically, + this.cropRect, + }); + + /// Clockwise rotation in degrees. Must be 0, 90, 180, or 270. + final double rotationDegrees; + + /// Whether to flip the image along the horizontal axis (left–right mirror). 
+ final bool flipHorizontally; + + /// Whether to flip the image along the vertical axis (upside-down mirror). + /// + /// Implemented as a 180° rotation composed with a horizontal flip. + final bool flipVertically; + + /// Optional crop rectangle in normalized (0,1) coordinate space. + /// + /// Applied after rotation/mirroring. Null means no crop. + final PlatformRect? cropRect; +} + @HostApi() abstract class CameraApi { /// Returns the list of available cameras. @@ -282,6 +335,14 @@ abstract class CameraApi { @ObjCSelector('setExposurePoint:') void setExposurePoint(PlatformPoint? point); + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + @async + @ObjCSelector('setLensPosition:') + void setLensPosition(double position); + /// Returns the minimum exposure offset supported by the camera. @async @ObjCSelector('getMinimumExposureOffset') @@ -352,6 +413,17 @@ abstract class CameraApi { @async @ObjCSelector('setImageFileFormat:') void setImageFileFormat(PlatformImageFileFormat format); + + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. 
+ @async + @ObjCSelector('setCameraTransform:') + void setTransform(PlatformCameraTransform transform); } @EventChannelApi() diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index 8a10d0d21f72..ed5f3064bbe2 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.10.1 +version: 0.10.2 environment: sdk: ^3.9.0 diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart index 53d7965c6bac..bbe36d08e8db 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart @@ -7,6 +7,7 @@ import 'dart:math'; import 'package:async/async.dart'; import 'package:camera_avfoundation/src/avfoundation_camera.dart'; +import 'package:camera_avfoundation/src/camera_transform.dart'; import 'package:camera_avfoundation/src/messages.g.dart'; import 'package:camera_avfoundation/src/utils.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; @@ -678,6 +679,14 @@ void main() { expect(minZoomLevel, stubZoomLevel); }); + test('Should set the lens position', () async { + const position = 0.5; + + await camera.setLensPosition(position); + + verify(mockApi.setLensPosition(position)); + }); + test('Should set the zoom level', () async { const zoom = 2.0; @@ -993,4 +1002,70 @@ void main() { verify(mockApi.setImageFileFormat(PlatformImageFileFormat.jpeg)); }); }); + + group('setTransform', () { + late AVFoundationCamera camera; + late MockCameraApi mockApi; 
+ const cameraId = 1; + + setUp(() { + mockApi = MockCameraApi(); + camera = AVFoundationCamera(api: mockApi); + }); + + test('forwards rotation-only transform to host API', () async { + await camera.setTransform( + cameraId, + const CameraTransform(rotationDegrees: 90), + ); + + final captured = + verify(mockApi.setTransform(captureAny)).captured.single + as PlatformCameraTransform; + expect(captured.rotationDegrees, 90); + expect(captured.flipHorizontally, false); + expect(captured.flipVertically, false); + expect(captured.cropRect, isNull); + }); + + test('forwards full transform with crop to host API', () async { + await camera.setTransform( + cameraId, + const CameraTransform( + rotationDegrees: 180, + flipHorizontally: true, + cropRect: CameraTransformRect( + x: 0.1, + y: 0.1, + width: 0.8, + height: 0.8, + ), + ), + ); + + final captured = + verify(mockApi.setTransform(captureAny)).captured.single + as PlatformCameraTransform; + expect(captured.rotationDegrees, 180); + expect(captured.flipHorizontally, true); + expect(captured.flipVertically, false); + expect(captured.cropRect, isNotNull); + expect(captured.cropRect!.x, 0.1); + expect(captured.cropRect!.y, 0.1); + expect(captured.cropRect!.width, 0.8); + expect(captured.cropRect!.height, 0.8); + }); + + test('forwards identity transform (zeros) to host API', () async { + await camera.setTransform(cameraId, const CameraTransform()); + + final captured = + verify(mockApi.setTransform(captureAny)).captured.single + as PlatformCameraTransform; + expect(captured.rotationDegrees, 0); + expect(captured.flipHorizontally, false); + expect(captured.flipVertically, false); + expect(captured.cropRect, isNull); + }); + }); } diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart index 225eac9931c6..79c2820e212e 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart +++ 
b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart @@ -22,6 +22,7 @@ import 'package:mockito/src/dummies.dart' as _i3; // ignore_for_file: unnecessary_parenthesis // ignore_for_file: camel_case_types // ignore_for_file: subtype_of_sealed_class +// ignore_for_file: invalid_use_of_internal_member /// A class which mocks [CameraApi]. /// @@ -231,6 +232,15 @@ class MockCameraApi extends _i1.Mock implements _i2.CameraApi { ) as _i4.Future); + @override + _i4.Future setLensPosition(double? position) => + (super.noSuchMethod( + Invocation.method(#setLensPosition, [position]), + returnValue: _i4.Future.value(), + returnValueForMissingStub: _i4.Future.value(), + ) + as _i4.Future); + @override _i4.Future getMinExposureOffset() => (super.noSuchMethod( @@ -360,4 +370,13 @@ class MockCameraApi extends _i1.Mock implements _i2.CameraApi { returnValueForMissingStub: _i4.Future.value(), ) as _i4.Future); + + @override + _i4.Future setTransform(_i2.PlatformCameraTransform? transform) => + (super.noSuchMethod( + Invocation.method(#setTransform, [transform]), + returnValue: _i4.Future.value(), + returnValueForMissingStub: _i4.Future.value(), + ) + as _i4.Future); } diff --git a/packages/camera/camera_linux/.gitignore b/packages/camera/camera_linux/.gitignore new file mode 100644 index 000000000000..33b7f6366ad1 --- /dev/null +++ b/packages/camera/camera_linux/.gitignore @@ -0,0 +1,31 @@ +deps/ + +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ +migrate_working_dir/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# The .vscode folder contains launch configuration and tasks you configure in +# VS Code which you may wish to be included in version control, so this line +# is commented out by default. +#.vscode/ + +# Flutter/Dart/Pub related +# Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock. 
+/pubspec.lock +**/doc/api/ +.dart_tool/ +build/ diff --git a/packages/camera/camera_linux/.metadata b/packages/camera/camera_linux/.metadata new file mode 100644 index 000000000000..42a96a716b35 --- /dev/null +++ b/packages/camera/camera_linux/.metadata @@ -0,0 +1,30 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. + +version: + revision: "603104015dd692ea3403755b55d07813d5cf8965" + channel: "[user-branch]" + +project_type: plugin + +# Tracks metadata for the flutter migrate command +migration: + platforms: + - platform: root + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + - platform: linux + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + + # User provided section + + # List of Local paths (relative to this file) that should be + # ignored by the migrate tool. + # + # Files that are not part of the templates will be ignored by default. + unmanaged_files: + - 'lib/main.dart' + - 'ios/Runner.xcodeproj/project.pbxproj' diff --git a/packages/camera/camera_linux/CHANGELOG.md b/packages/camera/camera_linux/CHANGELOG.md new file mode 100644 index 000000000000..41cc7d8192ec --- /dev/null +++ b/packages/camera/camera_linux/CHANGELOG.md @@ -0,0 +1,3 @@ +## 0.0.1 + +* TODO: Describe initial release. diff --git a/packages/camera/camera_linux/LICENSE b/packages/camera/camera_linux/LICENSE new file mode 100644 index 000000000000..ba75c69f7f21 --- /dev/null +++ b/packages/camera/camera_linux/LICENSE @@ -0,0 +1 @@ +TODO: Add your license here. 
diff --git a/packages/camera/camera_linux/README.md b/packages/camera/camera_linux/README.md new file mode 100644 index 000000000000..7d5a9fc073ed --- /dev/null +++ b/packages/camera/camera_linux/README.md @@ -0,0 +1,15 @@ +# camera_linux + +A new Flutter plugin project. + +## Getting Started + +This project is a starting point for a Flutter +[plug-in package](https://flutter.dev/to/develop-plugins), +a specialized package that includes platform-specific implementation code for +Android and/or iOS. + +For help getting started with Flutter development, view the +[online documentation](https://docs.flutter.dev), which offers tutorials, +samples, guidance on mobile development, and a full API reference. + diff --git a/packages/camera/camera_linux/analysis_options.yaml b/packages/camera/camera_linux/analysis_options.yaml new file mode 100644 index 000000000000..a5744c1cfbe7 --- /dev/null +++ b/packages/camera/camera_linux/analysis_options.yaml @@ -0,0 +1,4 @@ +include: package:flutter_lints/flutter.yaml + +# Additional information about this file can be found at +# https://dart.dev/guides/language/analysis-options diff --git a/packages/camera/camera_linux/lib/camera_linux.dart b/packages/camera/camera_linux/lib/camera_linux.dart new file mode 100644 index 000000000000..4a8d4b963e4b --- /dev/null +++ b/packages/camera/camera_linux/lib/camera_linux.dart @@ -0,0 +1,2 @@ +export 'src/linux_camera.dart'; +export 'src/messages.g.dart'; diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart new file mode 100644 index 000000000000..eace45141273 --- /dev/null +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -0,0 +1,494 @@ +import 'dart:async'; +import 'dart:math'; + +import 'package:camera_linux/src/messages.g.dart'; +import 'package:camera_platform_interface/camera_platform_interface.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; +import 
'package:path_provider/path_provider.dart'; +import 'package:stream_transform/stream_transform.dart'; + +class CameraLinux extends CameraPlatform { + final CameraApi _hostApi; + + CameraLinux({@visibleForTesting CameraApi? api}) + : _hostApi = api ?? CameraApi(); + + static void registerWith() { + CameraPlatform.instance = CameraLinux(); + } + + /// The controller we need to broadcast the different events coming + /// from handleMethodCall, specific to camera events. + /// + /// It is a `broadcast` because multiple controllers will connect to + /// different stream views of this Controller. + /// This is only exposed for test purposes. It shouldn't be used by clients of + /// the plugin as it may break or change at any time. + @visibleForTesting + final StreamController cameraEventStreamController = + StreamController.broadcast(); + + /// The per-camera handlers for messages that should be rebroadcast to + /// clients as [CameraEvent]s. + @visibleForTesting + final Map hostCameraHandlers = + {}; + + Stream _cameraEvents(int cameraId) => + cameraEventStreamController.stream + .where((CameraEvent event) => event.cameraId == cameraId); + + @override + Future> availableCameras() async { + try { + final camerasNames = await _hostApi.getAvailableCamerasNames(); + return camerasNames.map( + (name) { + return CameraDescription( + name: name, + lensDirection: CameraLensDirection.back, + sensorOrientation: 0, + ); + }, + ).toList(); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + /// resolutionPreset is not used on Linux. + /// enableAudio is not used on Linux. + @override + Future createCamera( + CameraDescription cameraDescription, + ResolutionPreset? 
resolutionPreset, { + bool enableAudio = false, + }) async { + try { + PlatformResolutionPreset pigeonResolutionPreset = + PlatformResolutionPreset.veryHigh; + + if (resolutionPreset != null) { + switch (resolutionPreset) { + case ResolutionPreset.low: + pigeonResolutionPreset = PlatformResolutionPreset.low; + break; + case ResolutionPreset.medium: + pigeonResolutionPreset = PlatformResolutionPreset.medium; + break; + case ResolutionPreset.high: + pigeonResolutionPreset = PlatformResolutionPreset.high; + break; + case ResolutionPreset.veryHigh: + pigeonResolutionPreset = PlatformResolutionPreset.veryHigh; + break; + case ResolutionPreset.ultraHigh: + pigeonResolutionPreset = PlatformResolutionPreset.ultraHigh; + break; + case ResolutionPreset.max: + pigeonResolutionPreset = PlatformResolutionPreset.max; + } + } + final cameraId = + await _hostApi.create(cameraDescription.name, pigeonResolutionPreset); + return cameraId; + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + @override + Future initializeCamera( + int cameraId, { + ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown, + }) async { + hostCameraHandlers.putIfAbsent(cameraId, + () => HostCameraMessageHandler(cameraId, cameraEventStreamController)); + final Completer completer = Completer(); + + unawaited( + onCameraInitialized(cameraId).first.then( + (CameraInitializedEvent value) => completer.complete(), + ), + ); + + PlatformImageFormatGroup imageFormat = PlatformImageFormatGroup.rgb8; + switch (imageFormatGroup) { + case ImageFormatGroup.jpeg: + case ImageFormatGroup.unknown: + imageFormat = PlatformImageFormatGroup.rgb8; + break; + case ImageFormatGroup.yuv420: + case ImageFormatGroup.nv21: + case ImageFormatGroup.bgra8888: + default: + imageFormat = PlatformImageFormatGroup.mono8; + break; + } + + try { + await _hostApi.initialize(cameraId, imageFormat); + } on PlatformException catch (e, s) { + completer.completeError( + CameraException(e.code, 
e.message), + s, + ); + } + + return completer.future; + } + + @override + Stream onCameraInitialized(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onCameraResolutionChanged(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onCameraClosing(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onCameraError(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onVideoRecordedEvent(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onDeviceOrientationChanged() { + return const Stream.empty(); + } + + /// The following methods are not implemented for Linux, as they are not + /// supported by the underlying camera API. + @override + Future lockCaptureOrientation( + int cameraId, DeviceOrientation orientation) { + return Future.value(); + } + + /// The following methods are not implemented for Linux, as they are not + /// supported by the underlying camera API. + @override + Future unlockCaptureOrientation(int cameraId) { + return Future.value(); + } + + @override + Future takePicture(int cameraId) async { + try { + final directory = await getTemporaryDirectory(); + final uuid = DateTime.now().millisecondsSinceEpoch.toString(); + final path = '${directory.path}/$uuid.jpg'; + await _hostApi.takePicture(cameraId, path); + return XFile(path); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + @override + Future prepareForVideoRecording() async { +// No-op for Linux no preparation is needed. + } + + @override + Future startVideoRecording( + int cameraId, { + @Deprecated( + 'This parameter is unused, and will be ignored on all platforms') + Duration? 
maxVideoDuration, + }) async { + try { + final directory = await getTemporaryDirectory(); + final uuid = DateTime.now().millisecondsSinceEpoch.toString(); + final path = '${directory.path}/$uuid.mp4'; + await _hostApi.startVideoRecording(cameraId, path); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + // No-op for Linux, as video recording is not supported. + return Future.value(); + } + + @override + Future stopVideoRecording(int cameraId) async { + try { + final path = await _hostApi.stopVideoRecording(cameraId); + return XFile(path); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + @override + Future pauseVideoRecording(int cameraId) { + throw UnimplementedError('pauseVideoRecording() is not implemented.'); + } + + @override + Future resumeVideoRecording(int cameraId) { + throw UnimplementedError('resumeVideoRecording() is not implemented.'); + } + + @override + Future setFlashMode(int cameraId, FlashMode mode) async { + // No-op for Linux, as flash mode is not supported. + } + + @override + Future setExposureMode(int cameraId, ExposureMode mode) async { + try { + await _hostApi.setExposureMode(cameraId, exposureModeToPlatform(mode)); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + @override + Future setExposurePoint(int cameraId, Point? point) async { + // No-op for Linux, as exposure point is not supported. 
+ } + + @override + Future getMinExposureOffset(int cameraId) async { + return 0.0; + } + + @override + Future getMaxExposureOffset(int cameraId) async { + return 0.0; + } + + @override + Future getExposureOffsetStepSize(int cameraId) async { + return 0.0; + } + + @override + Future setExposureOffset(int cameraId, double offset) async { + return 0.0; + } + + @override + Future setFocusMode(int cameraId, FocusMode mode) async { + try { + await _hostApi.setFocusMode(cameraId, focusModeToPlatform(mode)); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + @override + Future setFocusPoint(int cameraId, Point? point) async { + // No-op for Linux, as focus point is not supported. + } + + @override + Future getMaxZoomLevel(int cameraId) async { + return 0.0; + } + + @override + Future getMinZoomLevel(int cameraId) async { + return 0.0; + } + + @override + Future setZoomLevel(int cameraId, double zoom) async { + // No-op for Linux, as zoom is not supported. + } + + @override + Future pausePreview(int cameraId) async { + throw UnimplementedError('pausePreview() is not implemented.'); + } + + @override + Future resumePreview(int cameraId) async { + throw UnimplementedError('resumePreview() is not implemented.'); + } + + @override + Future setDescriptionWhileRecording( + CameraDescription description) async { + throw UnimplementedError( + 'setDescriptionWhileRecording() is not implemented.'); + } + + @override + Widget buildPreview(int cameraId) { + unawaited( + _hostApi.getTextureId(cameraId).then( + (int? 
textureId) { + cameraEventStreamController.add(TextureIdEvent(cameraId, textureId)); + }, + ), + ); + return StreamBuilder( + stream: _cameraEvents(cameraId) + .whereType() + .map((event) => event.textureId), + builder: (context, snapshot) { + if (snapshot.data == null || snapshot.data == -1) { + return const Center(child: CircularProgressIndicator()); + } + + return RepaintBoundary( + child: Texture( + textureId: snapshot.data!, + filterQuality: FilterQuality.none, + ), + ); + }, + ); + } + + @override + Future dispose(int cameraId) async { + // Remove the handler for this camera. + final HostCameraMessageHandler? handler = + hostCameraHandlers.remove(cameraId); + handler?.dispose(); + + try { + await _hostApi.dispose(cameraId); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + /// The following methods are not implemented for Linux, as only jpeg is supported + @override + Future setImageFileFormat(int cameraId, ImageFileFormat format) async {} + + Future setImageFormatGroup( + int cameraId, PlatformImageFormatGroup format) async { + try { + await _hostApi.setImageFormatGroup(cameraId, format); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } +} + +/// An event fired when the camera texture id changed. +class TextureIdEvent extends CameraEvent { + const TextureIdEvent( + super.cameraId, + this.textureId, + ); + + TextureIdEvent.fromJson(Map json) + : textureId = json['textureId']! as int?, + super(json['cameraId']! as int); + + /// The texture ID of the camera. + final int? 
textureId; + + Map toJson() => { + 'cameraId': cameraId, + if (textureId != null) 'textureId': textureId!, + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + super == other && + other is TextureIdEvent && + runtimeType == other.runtimeType && + textureId == other.textureId; + + @override + int get hashCode => Object.hash( + super.hashCode, + textureId, + ); +} + +/// Callback handler for camera-level events from the platform host. +@visibleForTesting +class HostCameraMessageHandler implements CameraEventApi { + /// Creates a new handler that listens for events from camera [cameraId], and + /// broadcasts them to [streamController]. + HostCameraMessageHandler(this.cameraId, this.streamController) { + CameraEventApi.setUp(this, messageChannelSuffix: cameraId.toString()); + } + + /// Removes the handler for native messages. + void dispose() { + CameraEventApi.setUp(null, messageChannelSuffix: cameraId.toString()); + } + + /// The camera ID this handler listens for events from. + final int cameraId; + + /// The controller used to broadcast camera events coming from the + /// host platform. + final StreamController streamController; + + @override + void error(String message) { + streamController.add(CameraErrorEvent(cameraId, message)); + } + + @override + void initialized(PlatformCameraState initialState) { + streamController.add( + CameraInitializedEvent( + cameraId, + initialState.previewSize.width, + initialState.previewSize.height, + exposureModeFromPlatform(initialState.exposureMode), + initialState.exposurePointSupported, + focusModeFromPlatform(initialState.focusMode), + initialState.focusPointSupported, + ), + ); + } + + @override + void textureId(int textureId) { + streamController.add(TextureIdEvent(cameraId, textureId)); + } +} + +/// Converts a Pigeon [PlatformExposureMode] to an [ExposureMode]. 
+ExposureMode exposureModeFromPlatform(PlatformExposureMode mode) { + return switch (mode) { + PlatformExposureMode.auto => ExposureMode.auto, + PlatformExposureMode.locked => ExposureMode.locked, + }; +} + +/// Converts a Pigeon [PlatformFocusMode] to an [FocusMode]. +FocusMode focusModeFromPlatform(PlatformFocusMode mode) { + return switch (mode) { + PlatformFocusMode.auto => FocusMode.auto, + PlatformFocusMode.locked => FocusMode.locked, + }; +} + +PlatformFocusMode focusModeToPlatform(FocusMode mode) { + return switch (mode) { + FocusMode.auto => PlatformFocusMode.auto, + FocusMode.locked => PlatformFocusMode.locked, + }; +} + +PlatformExposureMode exposureModeToPlatform(ExposureMode mode) { + return switch (mode) { + ExposureMode.auto => PlatformExposureMode.auto, + ExposureMode.locked => PlatformExposureMode.locked, + }; +} diff --git a/packages/camera/camera_linux/lib/src/messages.g.dart b/packages/camera/camera_linux/lib/src/messages.g.dart new file mode 100644 index 000000000000..a1d2cac8878b --- /dev/null +++ b/packages/camera/camera_linux/lib/src/messages.g.dart @@ -0,0 +1,619 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// Autogenerated from Pigeon (v22.7.4), do not edit directly. 
+// See also: https://pub.dev/packages/pigeon +// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, unused_shown_name, unnecessary_import, no_leading_underscores_for_local_identifiers + +import 'dart:async'; +import 'dart:typed_data' show Float64List, Int32List, Int64List, Uint8List; + +import 'package:flutter/foundation.dart' show ReadBuffer, WriteBuffer; +import 'package:flutter/services.dart'; + +PlatformException _createConnectionError(String channelName) { + return PlatformException( + code: 'channel-error', + message: 'Unable to establish connection on channel: "$channelName".', + ); +} + +List wrapResponse({Object? result, PlatformException? error, bool empty = false}) { + if (empty) { + return []; + } + if (error == null) { + return [result]; + } + return [error.code, error.message, error.details]; +} + +enum PlatformDeviceOrientation { + portraitUp, + landscapeLeft, + portraitDown, + landscapeRight, +} + +enum PlatformExposureMode { + auto, + locked, +} + +enum PlatformFlashMode { + off, + auto, + always, + torch, +} + +enum PlatformFocusMode { + auto, + locked, +} + +enum PlatformImageFormatGroup { + rgb8, + mono8, +} + +enum PlatformResolutionPreset { + low, + medium, + high, + veryHigh, + ultraHigh, + max, +} + +class PlatformSize { + PlatformSize({ + required this.width, + required this.height, + }); + + double width; + + double height; + + Object encode() { + return [ + width, + height, + ]; + } + + static PlatformSize decode(Object result) { + result as List; + return PlatformSize( + width: result[0]! as double, + height: result[1]! as double, + ); + } +} + +class PlatformCameraState { + PlatformCameraState({ + required this.previewSize, + required this.exposureMode, + required this.focusMode, + required this.exposurePointSupported, + required this.focusPointSupported, + }); + + /// The size of the preview, in pixels. 
+ PlatformSize previewSize; + + /// The default exposure mode + PlatformExposureMode exposureMode; + + /// The default focus mode + PlatformFocusMode focusMode; + + /// Whether setting exposure points is supported. + bool exposurePointSupported; + + /// Whether setting focus points is supported. + bool focusPointSupported; + + Object encode() { + return [ + previewSize, + exposureMode, + focusMode, + exposurePointSupported, + focusPointSupported, + ]; + } + + static PlatformCameraState decode(Object result) { + result as List; + return PlatformCameraState( + previewSize: result[0]! as PlatformSize, + exposureMode: result[1]! as PlatformExposureMode, + focusMode: result[2]! as PlatformFocusMode, + exposurePointSupported: result[3]! as bool, + focusPointSupported: result[4]! as bool, + ); + } +} + +class PlatformPoint { + PlatformPoint({ + required this.x, + required this.y, + }); + + double x; + + double y; + + Object encode() { + return [ + x, + y, + ]; + } + + static PlatformPoint decode(Object result) { + result as List; + return PlatformPoint( + x: result[0]! as double, + y: result[1]! as double, + ); + } +} + + +class _PigeonCodec extends StandardMessageCodec { + const _PigeonCodec(); + @override + void writeValue(WriteBuffer buffer, Object? 
value) { + if (value is int) { + buffer.putUint8(4); + buffer.putInt64(value); + } else if (value is PlatformDeviceOrientation) { + buffer.putUint8(129); + writeValue(buffer, value.index); + } else if (value is PlatformExposureMode) { + buffer.putUint8(130); + writeValue(buffer, value.index); + } else if (value is PlatformFlashMode) { + buffer.putUint8(131); + writeValue(buffer, value.index); + } else if (value is PlatformFocusMode) { + buffer.putUint8(132); + writeValue(buffer, value.index); + } else if (value is PlatformImageFormatGroup) { + buffer.putUint8(133); + writeValue(buffer, value.index); + } else if (value is PlatformResolutionPreset) { + buffer.putUint8(134); + writeValue(buffer, value.index); + } else if (value is PlatformSize) { + buffer.putUint8(135); + writeValue(buffer, value.encode()); + } else if (value is PlatformCameraState) { + buffer.putUint8(136); + writeValue(buffer, value.encode()); + } else if (value is PlatformPoint) { + buffer.putUint8(137); + writeValue(buffer, value.encode()); + } else { + super.writeValue(buffer, value); + } + } + + @override + Object? readValueOfType(int type, ReadBuffer buffer) { + switch (type) { + case 129: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformDeviceOrientation.values[value]; + case 130: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformExposureMode.values[value]; + case 131: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformFlashMode.values[value]; + case 132: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformFocusMode.values[value]; + case 133: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformImageFormatGroup.values[value]; + case 134: + final int? value = readValue(buffer) as int?; + return value == null ? 
null : PlatformResolutionPreset.values[value]; + case 135: + return PlatformSize.decode(readValue(buffer)!); + case 136: + return PlatformCameraState.decode(readValue(buffer)!); + case 137: + return PlatformPoint.decode(readValue(buffer)!); + default: + return super.readValueOfType(type, buffer); + } + } +} + +class CameraApi { + /// Constructor for [CameraApi]. The [binaryMessenger] named argument is + /// available for dependency injection. If it is left null, the default + /// BinaryMessenger will be used which routes to the host platform. + CameraApi({BinaryMessenger? binaryMessenger, String messageChannelSuffix = ''}) + : pigeonVar_binaryMessenger = binaryMessenger, + pigeonVar_messageChannelSuffix = messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : ''; + final BinaryMessenger? pigeonVar_binaryMessenger; + + static const MessageCodec pigeonChannelCodec = _PigeonCodec(); + + final String pigeonVar_messageChannelSuffix; + + /// Returns the list of available cameras. + Future> getAvailableCamerasNames() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCamerasNames$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as List?)!.cast(); + } + } + + /// Create a new camera with the given settings, and returns its ID. + Future create(String cameraName, PlatformResolutionPreset resolutionPreset) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.create$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraName, resolutionPreset]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as int?)!; + } + } + + /// Initializes the camera with the given ID. + Future initialize(int cameraId, PlatformImageFormatGroup imageFormat) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.initialize$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, imageFormat]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Get the texture ID for the camera with the given ID. + Future getTextureId(int cameraId) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getTextureId$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return (pigeonVar_replyList[0] as int?); + } + } + + /// Indicates that the given camera is no longer being used on the Dart side, + /// and any associated resources can be cleaned up. + Future dispose(int cameraId) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.dispose$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send([cameraId]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Takes a picture with the current settings, and returns the path to the + /// resulting file. + Future takePicture(int cameraId, String path) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.takePicture$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, path]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Begins recording video, optionally enabling streaming to Dart at the same + /// time. + Future startVideoRecording(int cameraId, String path) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
 pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, path]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Stops recording video, and returns the path to the resulting file. + Future stopVideoRecording(int cameraId) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as String?)!; + } + } + + /// Switches the camera to the given exposure mode. + Future setExposureMode(int cameraId, PlatformExposureMode mode) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, mode]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Switches the camera to the given focus mode. + Future setFocusMode(int cameraId, PlatformFocusMode mode) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, mode]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + Future setImageFormatGroup(int cameraId, PlatformImageFormatGroup imageFormatGroup) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, imageFormatGroup]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
 as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } +} + +/// Handler for native callbacks that are tied to a specific camera ID. +/// +/// This is intended to be initialized with the camera ID as a suffix. +abstract class CameraEventApi { + static const MessageCodec pigeonChannelCodec = _PigeonCodec(); + + /// Called when the camera is initialized for use. + void initialized(PlatformCameraState initialState); + + void textureId(int textureId); + + /// Called when an error occurs in the camera. + /// + /// This should be used for errors that occur outside of the context of + /// handling a specific HostApi call, such as during streaming. + void error(String message); + + static void setUp(CameraEventApi? api, {BinaryMessenger? binaryMessenger, String messageChannelSuffix = '',}) { + messageChannelSuffix = messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : ''; + { + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + 'dev.flutter.pigeon.camera_linux.CameraEventApi.initialized$messageChannelSuffix', pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + pigeonVar_channel.setMessageHandler(null); + } else { + pigeonVar_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.initialized was null.'); + final List args = (message as List?)!; + final PlatformCameraState? 
arg_initialState = (args[0] as PlatformCameraState?); + assert(arg_initialState != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.initialized was null, expected non-null PlatformCameraState.'); + try { + api.initialized(arg_initialState!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + { + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + 'dev.flutter.pigeon.camera_linux.CameraEventApi.textureId$messageChannelSuffix', pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + pigeonVar_channel.setMessageHandler(null); + } else { + pigeonVar_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.textureId was null.'); + final List args = (message as List?)!; + final int? arg_textureId = (args[0] as int?); + assert(arg_textureId != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.textureId was null, expected non-null int.'); + try { + api.textureId(arg_textureId!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + { + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + 'dev.flutter.pigeon.camera_linux.CameraEventApi.error$messageChannelSuffix', pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + pigeonVar_channel.setMessageHandler(null); + } else { + pigeonVar_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.error was null.'); + final List args = (message as List?)!; + final String? 
arg_message = (args[0] as String?); + assert(arg_message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.error was null, expected non-null String.'); + try { + api.error(arg_message!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + } +} diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt new file mode 100644 index 000000000000..3346180b9ecd --- /dev/null +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -0,0 +1,131 @@ +# The Flutter tooling requires that developers have CMake 3.10 or later +# installed. You should not increase this version, as doing so will cause +# the plugin to fail to compile for some customers of the plugin. +cmake_minimum_required(VERSION 3.10) + +set(CMAKE_CXX_STANDARD 17) + +# Set variables +set(PYLON_VERSION "8.0.2.16314") +set(PYLON_ARCHIVE_NAME "pylon-${PYLON_VERSION}_linux-aarch64.tar.gz") +set(PYLON_ARCHIVE_PATH ${CMAKE_BINARY_DIR}/downloads/${PYLON_ARCHIVE_NAME}) +set(PYLON_DOWNLOAD_URL "https://github.com/LightX-Innovations/flutter_packages/releases/download/camera_linux_v0.1/pylon-${PYLON_VERSION}_linux-aarch64.tar.gz") +set(PYLON_ROOT ${CMAKE_BINARY_DIR}/pylon-sdk) + +set(PYLON_MPEG_ARCHIVE_NAME "pylon-supplementary-package-for-mpeg-4-1.0.2.121_aarch64.tar.gz") +set(PYLON_MPEG_ARCHIVE_PATH ${CMAKE_BINARY_DIR}/downloads/${PYLON_MPEG_ARCHIVE_NAME}) +set(PYLON_MPEG_DOWNLOAD_URL "https://github.com/LightX-Innovations/flutter_packages/releases/download/camera_linux_v0.1/pylon-supplementary-package-for-mpeg-4-1.0.2.121_aarch64.tar.gz") + +# Download the archives if they do not exist +if(NOT EXISTS "${PYLON_ARCHIVE_PATH}") + message(STATUS "Downloading Pylon SDK archive...") + file(DOWNLOAD + "${PYLON_DOWNLOAD_URL}" + "${PYLON_ARCHIVE_PATH}" + SHOW_PROGRESS + STATUS 
DOWNLOAD_STATUS + ) + list(GET DOWNLOAD_STATUS 0 DOWNLOAD_RESULT) +endif() + +if(NOT EXISTS "${PYLON_MPEG_ARCHIVE_PATH}") + message(STATUS "Downloading Pylon SDK MPEG archive...") + file(DOWNLOAD + "${PYLON_MPEG_DOWNLOAD_URL}" + "${PYLON_MPEG_ARCHIVE_PATH}" + SHOW_PROGRESS + STATUS DOWNLOAD_STATUS + ) + list(GET DOWNLOAD_STATUS 0 DOWNLOAD_RESULT) +endif() + + +if(NOT EXISTS "${PYLON_ROOT}") + message(STATUS "Extracting Pylon SDK to ${PYLON_ROOT}...") + file(MAKE_DIRECTORY "${PYLON_ROOT}") + execute_process( + COMMAND ${CMAKE_COMMAND} -E tar -xzf "${PYLON_ARCHIVE_PATH}" + WORKING_DIRECTORY "${PYLON_ROOT}" + ) + message(STATUS "Extracting Pylon MPEG SDK to ${PYLON_ROOT}...") + execute_process( + COMMAND ${CMAKE_COMMAND} -E tar -xzf "${PYLON_MPEG_ARCHIVE_PATH}" + WORKING_DIRECTORY "${PYLON_ROOT}" + ) +endif() + +include_directories(${PYLON_ROOT}/include) + +link_directories(${PYLON_ROOT}/lib) + +# Project-level configuration. +set(PROJECT_NAME "camera_linux") +project(${PROJECT_NAME} LANGUAGES CXX) + +# This value is used when generating builds using this plugin, so it must +# not be changed. +set(PLUGIN_NAME "camera_linux_plugin") + +# Any new source files that you add to the plugin should be added here. +list(APPEND PLUGIN_SOURCES + "camera_plugin.cpp" + "camera_host_plugin.cpp" + + "camera_video_recorder_image_event_handler.cpp" + "camera.cpp" + "capture_pipeline.cpp" + "fl_lightx_texture_gl.cpp" + + "messages.g.cc" +) + +# Define the plugin library target. Its name must not be changed (see comment +# on PLUGIN_NAME above). +add_library(${PLUGIN_NAME} SHARED + ${PLUGIN_SOURCES} +) + +# Apply a standard set of build settings that are configured in the +# application-level CMakeLists.txt. This can be removed for plugins that want +# full control over build settings. +apply_standard_settings(${PLUGIN_NAME}) + +# Symbols are hidden by default to reduce the chance of accidental conflicts +# between plugins. 
This should not be removed; any symbols that should be +# exported should be explicitly exported with the FLUTTER_PLUGIN_EXPORT macro. +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) + +# Source include directories and library dependencies. Add any plugin-specific +# dependencies here. +target_include_directories(${PLUGIN_NAME} INTERFACE + "${CMAKE_CURRENT_SOURCE_DIR}/include" + "${CMAKE_CURRENT_SOURCE_DIR}" +) +target_link_libraries(${PLUGIN_NAME} PRIVATE flutter) +target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::GTK) +cmake_policy(SET CMP0072 NEW) +set(OpenGL_GL_PREFERENCE GLVND) +find_package( OpenGL REQUIRED ) +target_link_libraries(${PLUGIN_NAME} PRIVATE OpenGL::GL) +target_link_libraries(${PLUGIN_NAME} + PRIVATE + ${PYLON_ROOT}/lib/libpylonbase.so + ${PYLON_ROOT}/lib/libpylonutility.so +) +find_package(PkgConfig REQUIRED) + +# --- OpenCV Integration --- +find_package(OpenCV REQUIRED) +include_directories(${OpenCV_INCLUDE_DIRS}) +target_link_libraries(${PLUGIN_NAME} PRIVATE ${OpenCV_LIBS}) + +# List of absolute paths to libraries that should be bundled with the plugin. +# This list could contain prebuilt libraries, or libraries created by an +# external build triggered from this build file. 
+set(camera_linux_bundled_libraries + "${PYLON_ROOT}/lib/libpylonbase.so" + "${PYLON_ROOT}/lib/libpylonutility.so" + PARENT_SCOPE +) diff --git a/packages/camera/camera_linux/linux/camera.cpp b/packages/camera/camera_linux/linux/camera.cpp new file mode 100644 index 000000000000..74fb05e30e3c --- /dev/null +++ b/packages/camera/camera_linux/linux/camera.cpp @@ -0,0 +1,411 @@ +#include "camera.h" + +#include +#include + +#include "capture_pipeline.h" + +Camera::Camera(Pylon::IPylonDevice* device, int64_t camera_id, + FlPluginRegistrar* registrar, + CameraLinuxPlatformResolutionPreset resolution_preset) + : camera_id(camera_id), + cameraLinuxCameraEventApi(camera_linux_camera_event_api_new( + fl_plugin_registrar_get_messenger(registrar), + std::to_string(camera_id).c_str())), + exposure_mode(CameraLinuxPlatformExposureMode:: + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO), + focus_mode(CameraLinuxPlatformFocusMode:: + CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED), + width(3840), + height(2160), + imageFormatGroup(CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8), + resolution_preset(resolution_preset), + registrar(registrar) { + camera = std::make_unique(device); + setResolutionPreset(resolution_preset); + if (registrar) g_object_ref(registrar); +} + +Camera::~Camera() { + if (capturePipeline && camera) camera->StopGrabbing(); + if (camera) { + if (camera->IsGrabbing()) camera->StopGrabbing(); + if (camera->IsOpen()) camera->Close(); + } + if (cameraLinuxCameraEventApi) g_object_unref(cameraLinuxCameraEventApi); + if (registrar) g_object_unref(registrar); +} + +void Camera::initialize(CameraLinuxPlatformImageFormatGroup imageFormat) { + imageFormatGroup = imageFormat; + capturePipeline = std::make_unique(*this, registrar); + if (camera->IsOpen()) { + camera->Close(); + } + + camera->Open(); + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + Pylon::CEnumParameter(nodemap, "DeviceLinkThroughputLimitMode") + .TrySetValue("Off"); + 
Pylon::CBooleanParameter(nodemap, "AcquisitionFrameRateEnable") + .TrySetValue(true); + Pylon::CFloatParameter(nodemap, "AcquisitionFrameRate").TrySetValue(60.0); + Pylon::CFloatParameter(nodemap, "ResultingFrameRate").TrySetValue(60.0); + setImageFormatGroup(imageFormat); + Pylon::CIntegerParameter(nodemap, "Width").TrySetValue(width); + Pylon::CIntegerParameter(nodemap, "Height").TrySetValue(height); + Pylon::CIntegerParameter(nodemap, "OffsetX").TrySetValue(0); + Pylon::CIntegerParameter(nodemap, "OffsetY").TrySetValue(0); + Pylon::CStringParameter(nodemap, "ExposureAuto").TrySetValue("Off"); + Pylon::CBooleanParameter(nodemap, "ReverseY").TrySetValue(true); + Pylon::CBooleanParameter(nodemap, "AutoFunctionROIUseBrightness") + .TrySetValue(false); + Pylon::CBooleanParameter(nodemap, "AutoFunctionROIUseWhiteBalance") + .TrySetValue(false); + Pylon::CEnumParameter(nodemap, "BslDefectPixelCorrectionMode") + .TrySetValue("On"); + + capturePipeline->StartGrabbing(); + emitState(); +} + +void Camera::setImageFormatGroup( + CameraLinuxPlatformImageFormatGroup imageFormatGroup) { + CAMERA_CONFIG_LOCK({ + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + switch (imageFormatGroup) { + case CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8: + Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("Mono8"); + break; + case CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8: + default: + Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("RGB8"); + break; + } + }); +} + +int64_t Camera::getTextureId() { + if (!capturePipeline) return -1; + return capturePipeline->get_texture_id(); +} + +void Camera::takePicture(std::string filePath) { + CAMERA_CONFIG_LOCK( + Pylon::CGrabResultPtr grabResult; + + if (camera->IsGrabbing()) { camera->StopGrabbing(); } + + if (!camera->GrabOne(Pylon::INFINITE, grabResult, + Pylon::TimeoutHandling_Return)) { + std::cerr << "Failed to grab image within timeout." 
<< std::endl; + return; + } + + if (!grabResult.IsValid() || !grabResult->GrabSucceeded()) { + std::cerr << "Failed to grab image." << std::endl; + return; + }; + Pylon::CPylonImage image; image.AttachGrabResultBuffer(grabResult); + bool isMono = image.GetPixelType() == Pylon::PixelType_Mono8 || + image.GetPixelType() == Pylon::PixelType_Mono12 || + image.GetPixelType() == Pylon::PixelType_Mono16; + + cv::Mat mat(grabResult->GetHeight(), grabResult->GetWidth(), + isMono ? CV_8UC1 : CV_8UC3, (uint8_t*)image.GetBuffer()); + cv::Mat bgr; + cv::cvtColor(mat, bgr, isMono ? cv::COLOR_GRAY2BGR : cv::COLOR_RGB2BGR); + cv::imwrite(filePath, bgr); + + ); +} + +void camera_linux_camera_event_api_initialized_callback(GObject* object, + GAsyncResult* result, + gpointer user_data) {} + +void Camera::emitState() { + if (!cameraLinuxCameraEventApi) return; + CameraLinuxPlatformSize* size = camera_linux_platform_size_new(width, height); + CameraLinuxPlatformCameraState* cameraState = + camera_linux_platform_camera_state_new(size, exposure_mode, focus_mode, + false, false); + camera_linux_camera_event_api_initialized( + cameraLinuxCameraEventApi, cameraState, nullptr, + camera_linux_camera_event_api_initialized_callback, nullptr); + g_object_unref(cameraState); + g_object_unref(size); +} + +void Camera::emitTextureId(int64_t textureId) const { + if (!cameraLinuxCameraEventApi) return; + + camera_linux_camera_event_api_texture_id( + cameraLinuxCameraEventApi, textureId, nullptr, + camera_linux_camera_event_api_initialized_callback, nullptr); +} + +// void Camera::startGrabbing() { +// GenApi::INodeMap& nodemap = camera->GetNodeMap(); +// Pylon::CEnumParameter(nodemap, "TriggerSelector").SetValue("FrameStart"); +// Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On"); +// Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software"); + +// // Manual grab loop with exposure bracketing +// cameraTextureImageEventHandler->OnImageEventHandlerRegistered(*camera); + +// 
camera->StartGrabbing(Pylon::GrabStrategy_OneByOne, +// Pylon::EGrabLoop::GrabLoop_ProvidedByUser); + +// std::thread([this]() { +// double shortExposure = 1000.0; // µs - initial value +// // double longExposure = 128000.0; // µs +// // const double gain = 0.6; +// // const double targetBrightness = 120.0; // target average +// // brightness + +// // const double overblownTargetRatio = 0.01; // 3% +// // const double overblownThreshold = 240.0; + +// auto& nodemap = camera->GetNodeMap(); +// // const double minExposure = +// // Pylon::CFloatParameter(nodemap, "ExposureTime").GetMin(); +// // const double maxExposure = +// // Pylon::CFloatParameter(nodemap, "ExposureTime").GetMax(); + +// while (camera->IsGrabbing()) { +// // --- Short exposure --- +// Pylon::CFloatParameter(nodemap, "ExposureTime") +// .TrySetValue(shortExposure); +// camera->WaitForFrameTriggerReady(5000, +// Pylon::TimeoutHandling_ThrowException); +// camera->ExecuteSoftwareTrigger(); + +// Pylon::CGrabResultPtr shortResult; +// camera->RetrieveResult(5000, shortResult, +// Pylon::TimeoutHandling_ThrowException); + +// // if (shortResult && shortResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // shortResult); + +// // // === Adjust short exposure for overblown % === +// // const int width = shortResult->GetWidth(); +// // const int height = shortResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(shortResult->GetBuffer()); + +// // const int cx = width / 2; +// // const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // size_t overblown = 0; +// // size_t total = 0; + +// // for (int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// 
// double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance >= overblownThreshold) { +// // overblown++; +// // } +// // total++; +// // } +// // } +// // } + +// // if (total > 0) { +// // double ratio = static_cast(overblown) / total; +// // double error = overblownTargetRatio - ratio; + +// // // Adjust short exposure proportionally +// // double proposed = +// // shortExposure * (1.0 + gain * error / +// // overblownTargetRatio); +// // shortExposure = +// // std::max(minExposure, std::min(maxExposure, proposed)); +// // } +// // } + +// // // --- Long exposure --- +// // Pylon::CFloatParameter(nodemap, +// // "ExposureTime").TrySetValue(longExposure); +// // camera->WaitForFrameTriggerReady(5000, +// // Pylon::TimeoutHandling_ThrowException); +// // camera->ExecuteSoftwareTrigger(); + +// // Pylon::CGrabResultPtr longResult; +// // camera->RetrieveResult(5000, longResult, +// // Pylon::TimeoutHandling_ThrowException); +// // if (longResult && longResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // longResult); + +// // // === Adjust long exposure brightness as before === +// // const int width = longResult->GetWidth(); +// // const int height = longResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(longResult->GetBuffer()); + +// // const int cx = width / 2; +// // const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // uint64_t sum = 0; +// // size_t count = 0; + +// // for (int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// // double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance > 10 && luminance < 240) { +// // sum += luminance; +// // count++; +// 
// } +// // } +// // } +// // } + +// // if (count > 0) { +// // double avgBrightness = static_cast(sum) / count; +// // double error = targetBrightness - avgBrightness; +// // double proposed = +// // longExposure * (1.0 + gain * error / targetBrightness); +// // longExposure = std::max(minExposure, std::min(maxExposure, +// // proposed)); +// // } +// // } +// } +// }).detach(); +// } + +Camera& Camera::setResolutionPreset( + CameraLinuxPlatformResolutionPreset preset) { + switch (preset) { + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW: + width = 352; + height = 288; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM: + width = 640; + height = 480; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH: + width = 1280; + height = 720; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH: + width = 1920; + height = 1080; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH: + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX: + width = 3840; + height = 2160; + break; + default: + width = 1920; + height = 1080; + break; + } + resolution_preset = preset; + return *this; +} + +void Camera::setExposureMode(CameraLinuxPlatformExposureMode mode) { + CAMERA_CONFIG_LOCK({ + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + switch (mode) { + case CameraLinuxPlatformExposureMode:: + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO: + Pylon::CEnumParameter(nodemap, "ExposureAuto") + .TrySetValue("Continuous"); + break; + case CameraLinuxPlatformExposureMode:: + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED: + Pylon::CEnumParameter(nodemap, "ExposureAuto").TrySetValue("Off"); + break; + default: + Pylon::CEnumParameter(nodemap, "ExposureAuto") + .TrySetValue("Continuous"); + break; + } + exposure_mode 
= mode; + emitState(); + }); +} + +void Camera::setFocusMode(CameraLinuxPlatformFocusMode mode) { + CAMERA_CONFIG_LOCK({ + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + switch (mode) { + case CameraLinuxPlatformFocusMode::CAMERA_LINUX_PLATFORM_FOCUS_MODE_AUTO: + Pylon::CEnumParameter(nodemap, "FocusAuto") + .TrySetValue("FocusAuto_Continuous"); + break; + case CameraLinuxPlatformFocusMode:: + CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED: + Pylon::CEnumParameter(nodemap, "FocusAuto") + .TrySetValue("FocusAuto_Off"); + break; + default: + Pylon::CEnumParameter(nodemap, "FocusAuto") + .TrySetValue("FocusAuto_Continuous"); + break; + } + focus_mode = mode; + emitState(); + }); +} + +void Camera::startVideoRecording(std::string filePath) { + if (!camera || !Pylon::CVideoWriter::IsSupported() || + cameraVideoRecorderImageEventHandler) { + std::cerr << "Video recording is not supported or camera is not " + "initialized. or already recording." + << std::endl; + return; + } + CAMERA_CONFIG_LOCK({ + cameraVideoRecorderImageEventHandler = + std::make_unique(filePath); + camera->RegisterImageEventHandler( + cameraVideoRecorderImageEventHandler.get(), + Pylon::RegistrationMode_Append, Pylon::Cleanup_None); + }); +} + +void Camera::stopVideoRecording(std::string& filePath) { + if (!camera || !cameraVideoRecorderImageEventHandler) { + return; + } + CAMERA_CONFIG_LOCK({ + filePath = cameraVideoRecorderImageEventHandler->m_videoFilePath; + camera->DeregisterImageEventHandler( + cameraVideoRecorderImageEventHandler.get()); + cameraVideoRecorderImageEventHandler.reset(); + }); +} \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/camera.h b/packages/camera/camera_linux/linux/camera.h new file mode 100644 index 000000000000..5bdf143e5cb5 --- /dev/null +++ b/packages/camera/camera_linux/linux/camera.h @@ -0,0 +1,95 @@ + +#ifndef CAMERA_H_ +#define CAMERA_H_ + +#include + +#include "camera_video_recorder_image_event_handler.h" +#include "capture_pipeline.h" 
+#include "flutter_linux/flutter_linux.h"
+#include "messages.g.h"
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Woverloaded-virtual"
+#pragma clang diagnostic ignored "-Wunused-variable"
+
+#include <pylon/PylonIncludes.h>  // NOTE(review): include target was stripped in this patch; pylon headers are required here — confirm.
+
+#pragma clang diagnostic pop
+
+// Owns one pylon camera plus its streaming pipeline and the Flutter event
+// channel used to report state back to Dart.
+class Camera {
+  // Camera
+ public:
+  int64_t camera_id;
+  // NOTE(review): template arguments below were stripped by the patch
+  // mangling and are reconstructed from usage in camera.cpp — confirm types.
+  std::unique_ptr<Pylon::CInstantCamera> camera;
+  std::unique_ptr<CapturePipeline> capturePipeline;
+  CameraLinuxCameraEventApi* cameraLinuxCameraEventApi;
+  std::unique_ptr<CameraVideoRecorderImageEventHandler>
+      cameraVideoRecorderImageEventHandler;
+
+  Camera(Pylon::IPylonDevice* device, int64_t camera_id,
+         FlPluginRegistrar* registrar,
+         CameraLinuxPlatformResolutionPreset resolution_preset);
+
+  // FIX: the defaulted moves copied the raw GObject pointers
+  // (cameraLinuxCameraEventApi, registrar) and left them set in the
+  // moved-from object, whose destructor then g_object_unref'd them a second
+  // time. std::vector<Camera> reallocation (cameras.emplace_back/erase in
+  // CameraHostPlugin) triggers exactly those moves. Transfer ownership
+  // explicitly instead.
+  Camera(Camera&& other) noexcept
+      : camera_id(other.camera_id),
+        camera(std::move(other.camera)),
+        capturePipeline(std::move(other.capturePipeline)),
+        cameraLinuxCameraEventApi(other.cameraLinuxCameraEventApi),
+        cameraVideoRecorderImageEventHandler(
+            std::move(other.cameraVideoRecorderImageEventHandler)),
+        exposure_mode(other.exposure_mode),
+        focus_mode(other.focus_mode),
+        width(other.width),
+        height(other.height),
+        imageFormatGroup(other.imageFormatGroup),
+        resolution_preset(other.resolution_preset),
+        registrar(other.registrar) {
+    other.cameraLinuxCameraEventApi = nullptr;
+    other.registrar = nullptr;
+  }
+  Camera& operator=(Camera&& other) noexcept {
+    if (this != &other) {
+      // Swap everything: `other`'s destructor releases our old resources.
+      std::swap(camera_id, other.camera_id);
+      std::swap(camera, other.camera);
+      std::swap(capturePipeline, other.capturePipeline);
+      std::swap(cameraLinuxCameraEventApi, other.cameraLinuxCameraEventApi);
+      std::swap(cameraVideoRecorderImageEventHandler,
+                other.cameraVideoRecorderImageEventHandler);
+      std::swap(exposure_mode, other.exposure_mode);
+      std::swap(focus_mode, other.focus_mode);
+      std::swap(width, other.width);
+      std::swap(height, other.height);
+      std::swap(imageFormatGroup, other.imageFormatGroup);
+      std::swap(resolution_preset, other.resolution_preset);
+      std::swap(registrar, other.registrar);
+    }
+    return *this;
+  }
+
+  ~Camera();
+
+  void initialize(CameraLinuxPlatformImageFormatGroup imageFormat);
+
+  int64_t getTextureId();
+
+  void takePicture(std::string filePath);
+  void startVideoRecording(std::string filePath);
+  void stopVideoRecording(std::string& filePath);
+
+  void setImageFormatGroup(
+      CameraLinuxPlatformImageFormatGroup imageFormatGroup);
+  void setExposureMode(CameraLinuxPlatformExposureMode mode);
+  void setFocusMode(CameraLinuxPlatformFocusMode mode);
+
+  struct HDRFrame {
+    // NOTE(review): element type reconstructed (angle brackets stripped);
+    // frame pixel data is handled as uint8_t* elsewhere — confirm.
+    std::vector<uint8_t> buffer;
+    int width = 0;
+    int height = 0;
+    int exposure = 0;  // in microseconds
+  };
+
+  // State
+ public:
+  CameraLinuxPlatformExposureMode exposure_mode;
+  CameraLinuxPlatformFocusMode focus_mode;
+  int width;
+  int height;
+  CameraLinuxPlatformImageFormatGroup imageFormatGroup;
+
+  void emitState();
+  void emitTextureId(int64_t textureId) const;
+
+  Camera& setResolutionPreset(CameraLinuxPlatformResolutionPreset preset);
+
+ private:
+  CameraLinuxPlatformResolutionPreset resolution_preset;
+  FlPluginRegistrar* registrar;
+};
+
+// Runs `code` with grabbing paused: tears the pipeline down, executes the
+// body, then rebuilds and restarts the pipeline if it was grabbing before.
+// FIX: the null check must come before the first camera-> dereference; the
+// original evaluated camera->IsGrabbing() and only then tested !camera.
+#define CAMERA_CONFIG_LOCK(code)                                        \
+  do {                                                                  \
+    if (!camera) {                                                      \
+      std::cerr << "Camera is not initialized." << std::endl;           \
+      return;                                                           \
+    }                                                                   \
+    bool wasGrabbing = camera->IsGrabbing();                            \
+    if (wasGrabbing && capturePipeline) {                               \
+      capturePipeline->StopGrabbing();                                  \
+      capturePipeline.reset();                                          \
+    }                                                                   \
+    {code};                                                             \
+    if (wasGrabbing) {                                                  \
+      capturePipeline = std::make_unique<CapturePipeline>(*this, registrar); \
+      capturePipeline->StartGrabbing();                                 \
+    }                                                                   \
+  } while (0)
+
+#endif  // CAMERA_H_
diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.cpp b/packages/camera/camera_linux/linux/camera_host_plugin.cpp
new file mode 100644
index 000000000000..0fcb98dfc145
--- /dev/null
+++ b/packages/camera/camera_linux/linux/camera_host_plugin.cpp
@@ -0,0 +1,201 @@
+#include "camera_host_plugin.h"
+
+// NOTE(review): element type reconstructed (angle brackets stripped);
+// header declares `static std::vector cameras`, holding Camera by value.
+std::vector<Camera> CameraHostPlugin::cameras = {};
+FlPluginRegistrar* CameraHostPlugin::registrar = nullptr;
+
+// Registers all Pigeon method handlers and initializes the pylon runtime.
+CameraHostPlugin::CameraHostPlugin(FlPluginRegistrar* registrar)
+    : m_registrar(FL_PLUGIN_REGISTRAR(g_object_ref(registrar))) {
+  CameraHostPlugin::registrar = m_registrar;
+  static CameraLinuxCameraApiVTable api_vtable = {
+      .get_available_cameras_names = get_available_cameras_names,
+      .create = create,
+      .initialize = initialize,
+      .get_texture_id = get_texture_id,
+      .dispose = dispose,
+      .take_picture = take_picture,
+      .start_video_recording = start_video_recording,
+      .stop_video_recording = stop_video_recording,
+      .set_exposure_mode = set_exposure_mode,
+      .set_focus_mode = set_focus_mode,
+      .set_image_format_group = set_image_format_group,
+  };
+
+  camera_linux_camera_api_set_method_handlers(
+      fl_plugin_registrar_get_messenger(registrar), nullptr, &api_vtable, this,
+      nullptr);
+  Pylon::PylonInitialize();
+}
+
+CameraHostPlugin::~CameraHostPlugin() {
+  cameras.clear();
+  g_object_unref(m_registrar);
+  Pylon::PylonTerminate();
+}
+
+// Looks up a camera by serial-derived id; throws (caught by the
+// CAMERA_HOST_ERROR_HANDLING wrapper) when the id is unknown.
+inline Camera& CameraHostPlugin::get_camera_by_id(int64_t camera_id) {
+  for (size_t i = 0; i < cameras.size(); ++i) {
+    if (cameras[i].camera_id == camera_id) {
+      return cameras[i];
+    }
+  }
+  throw std::runtime_error("Camera not found");
+}
+
+void CameraHostPlugin::get_available_cameras_names(
+
CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(get_available_cameras_names, {
+    Pylon::CTlFactory& TlFactory = Pylon::CTlFactory::GetInstance();
+    Pylon::DeviceInfoList_t lstDevices;
+    TlFactory.EnumerateDevices(lstDevices);
+    FlValue* list = fl_value_new_list();
+
+    if (!lstDevices.empty()) {
+      for (auto&& it = lstDevices.begin(); it != lstDevices.end(); ++it) {
+        fl_value_append_take(list, fl_value_new_string(it->GetFriendlyName()));
+      }
+    }
+
+    CAMERA_HOST_RETURN(list);
+  });
+}
+
+// Creates (or re-creates) a Camera for the device with the given friendly
+// name; the camera id is the device serial number parsed as an integer.
+void CameraHostPlugin::create(
+    const gchar* camera_name,
+    CameraLinuxPlatformResolutionPreset resolution_preset,
+    CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(create, {
+    Pylon::CTlFactory& TlFactory = Pylon::CTlFactory::GetInstance();
+    Pylon::DeviceInfoList_t lstDevices;
+    TlFactory.EnumerateDevices(lstDevices);
+
+    for (auto&& it = lstDevices.begin(); it != lstDevices.end(); ++it) {
+      if (it->GetFriendlyName() == camera_name) {
+        std::string serialNumber = it->GetSerialNumber().c_str();
+        int64_t camera_id = std::stoll(serialNumber);
+        // Drop any stale instance with the same id before re-creating it.
+        for (auto&& camera_it = cameras.begin(); camera_it != cameras.end();
+             ++camera_it) {
+          if (camera_it->camera_id == camera_id) {
+            cameras.erase(camera_it);
+            break;
+          }
+        }
+        cameras.emplace_back(TlFactory.CreateDevice(*it), camera_id, registrar,
+                             resolution_preset);
+
+        CAMERA_HOST_RETURN(camera_id);
+        return;
+      }
+    }
+
+    CAMERA_HOST_RAISE_ERROR("Camera not found");
+  });
+}
+
+// Destroys the Camera with the given id (its destructor stops grabbing and
+// closes the device). Responds success even when the id is unknown.
+void CameraHostPlugin::dispose(
+    int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle,
+    gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(dispose, {
+    for (auto&& camera_it = cameras.begin(); camera_it != cameras.end();
+         ++camera_it) {
+      if (camera_it->camera_id == camera_id) {
+        cameras.erase(camera_it);
+        // FIX: erase() invalidates camera_it; the original kept iterating
+        // with the dead iterator (undefined behavior). Ids are unique, so
+        // stop after the first match — matching create()'s loop above.
+        break;
+      }
+    }
+    CAMERA_HOST_VOID_RETURN();
+  });
+}
+
+void CameraHostPlugin::camera_linux_camera_event_api_initialized_callback(
+
GObject* object, GAsyncResult* result, gpointer user_data) {}
+
+void CameraHostPlugin::set_image_format_group(
+    int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format_group,
+    CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(set_image_format_group, {
+    Camera& camera = get_camera_by_id(camera_id);
+    camera.setImageFormatGroup(image_format_group);
+    CAMERA_HOST_VOID_RETURN();
+  });
+}
+
+void CameraHostPlugin::initialize(
+    int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format,
+    CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(initialize, {
+    Camera& camera = get_camera_by_id(camera_id);
+    camera.initialize(image_format);
+    CAMERA_HOST_VOID_RETURN();
+  });
+}
+
+// Returns the Flutter texture id for the camera's preview, or an error if
+// the capture pipeline has not created a texture yet (-1 sentinel).
+void CameraHostPlugin::get_texture_id(
+    int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle,
+    gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(get_texture_id, {
+    Camera& camera = get_camera_by_id(camera_id);
+    int64_t texture_id = camera.getTextureId();
+    if (texture_id == -1) {
+      CAMERA_HOST_RAISE_ERROR("Texture not created");
+      // FIX: the original fell through to CAMERA_HOST_RETURN below and
+      // responded on the same handle a second time.
+      return;
+    }
+    CAMERA_HOST_RETURN(&texture_id);
+  });
+}
+
+void CameraHostPlugin::set_exposure_mode(
+    int64_t camera_id, CameraLinuxPlatformExposureMode mode,
+    CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(set_exposure_mode, {
+    Camera& camera = get_camera_by_id(camera_id);
+    camera.setExposureMode(mode);
+
+    CAMERA_HOST_VOID_RETURN();
+  });
+}
+
+void CameraHostPlugin::set_focus_mode(
+    int64_t camera_id, CameraLinuxPlatformFocusMode mode,
+    CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(set_focus_mode, {
+    Camera& camera = get_camera_by_id(camera_id);
+    camera.setFocusMode(mode);
+
+    CAMERA_HOST_VOID_RETURN();
+  });
+}
+
+void CameraHostPlugin::take_picture(
+    int64_t camera_id, const gchar* path,
+
CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(take_picture, {
+    Camera& camera = get_camera_by_id(camera_id);
+
+    camera.takePicture(std::string(path));
+    CAMERA_HOST_VOID_RETURN();
+  });
+}
+
+void CameraHostPlugin::start_video_recording(
+    int64_t camera_id, const gchar* path,
+    CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(start_video_recording, {
+    Camera& camera = get_camera_by_id(camera_id);
+
+    camera.startVideoRecording(std::string(path));
+    CAMERA_HOST_VOID_RETURN();
+  });
+}
+
+// Stops an active recording and responds with the recorded file's path; an
+// empty path signals that no recording was in progress.
+void CameraHostPlugin::stop_video_recording(
+    int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle,
+    gpointer user_data) {
+  CAMERA_HOST_ERROR_HANDLING(stop_video_recording, {
+    Camera& camera = get_camera_by_id(camera_id);
+
+    std::string path;
+    camera.stopVideoRecording(path);
+    if (path.empty()) {
+      CAMERA_HOST_RAISE_ERROR("Video recording not started");
+      // FIX: the original fell through and also called CAMERA_HOST_RETURN,
+      // responding twice on the same handle.
+      return;
+    }
+    CAMERA_HOST_RETURN(path.c_str());
+  });
+}
\ No newline at end of file
diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.h b/packages/camera/camera_linux/linux/camera_host_plugin.h
new file mode 100644
index 000000000000..0f6294cd7ff2
--- /dev/null
+++ b/packages/camera/camera_linux/linux/camera_host_plugin.h
@@ -0,0 +1,111 @@
+
+#ifndef CAMERA_HOST_PLUGIN_PRIVATE_H_
+#define CAMERA_HOST_PLUGIN_PRIVATE_H_
+
+#include <vector>  // NOTE(review): include target was stripped in this patch; std::vector is used below — confirm.
+
+#include "flutter_linux/flutter_linux.h"
+#include "messages.g.h"
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Woverloaded-virtual"
+#pragma clang diagnostic ignored "-Wunused-variable"
+
+#include <pylon/PylonIncludes.h>  // NOTE(review): include target was stripped in this patch — confirm.
+
+#include "camera.h"
+
+#pragma clang diagnostic pop
+
+// Wraps `code` in a try/catch that converts pylon and std exceptions into a
+// Pigeon error response on `response_handle`, and binds the per-method
+// respond helpers used by CAMERA_HOST_RETURN / CAMERA_HOST_RAISE_ERROR.
+#define CAMERA_HOST_ERROR_HANDLING(method_name, code)                      \
+  try {                                                                    \
+    [[maybe_unused]] auto camera_linux_camera_api_respond_macro =          \
+        &camera_linux_camera_api_respond_##method_name;                    \
+    [[maybe_unused]] auto camera_linux_camera_api_respond_error_macro =    \
+
&camera_linux_camera_api_respond_error_##method_name;                  \
+    code                                                                   \
+  } catch (const Pylon::GenericException& e) {                             \
+    std::cerr << e.GetDescription() << std::endl;                          \
+    camera_linux_camera_api_respond_error_##method_name(                   \
+        response_handle, nullptr, e.GetDescription(), nullptr);            \
+  } catch (const std::exception& e) {                                      \
+    camera_linux_camera_api_respond_error_##method_name(                   \
+        response_handle, nullptr, e.what(), nullptr);                      \
+    std::cerr << "Exception occurred: " << e.what() << std::endl;          \
+  } catch (...) {                                                          \
+    camera_linux_camera_api_respond_error_##method_name(                   \
+        response_handle, nullptr, "CameraLinuxPlugin Unknown error", nullptr); \
+  }
+
+#define CAMERA_HOST_RETURN(...) \
+  camera_linux_camera_api_respond_macro(response_handle, __VA_ARGS__)
+
+#define CAMERA_HOST_VOID_RETURN() \
+  camera_linux_camera_api_respond_macro(response_handle)
+
+// Sends an error response with the given message (a string literal at every
+// call site).
+// FIX: the original used `#description`, stringizing the argument, so a call
+// like CAMERA_HOST_RAISE_ERROR("Camera not found") delivered the message
+// wrapped in escaped quotes ("\"Camera not found\"").
+#define CAMERA_HOST_RAISE_ERROR(description)                            \
+  camera_linux_camera_api_respond_error_macro(response_handle, nullptr, \
+                                              description, nullptr)
+
+// Static host-side implementation of the Pigeon CameraLinuxCameraApi; owns
+// the process-wide list of open cameras.
+class CameraHostPlugin {
+  static FlPluginRegistrar* registrar;
+  FlPluginRegistrar* m_registrar;
+  // NOTE(review): element type reconstructed (angle brackets stripped);
+  // the .cpp iterates this as Camera values — confirm.
+  static std::vector<Camera> cameras;
+
+ public:
+  CameraHostPlugin(FlPluginRegistrar* registrar);
+
+  ~CameraHostPlugin();
+
+  inline static Camera& get_camera_by_id(int64_t camera_id);
+
+  static void get_available_cameras_names(
+      CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data);
+
+  static void create(const gchar* camera_name,
+                     CameraLinuxPlatformResolutionPreset resolution_preset,
+                     CameraLinuxCameraApiResponseHandle* response_handle,
+                     gpointer user_data);
+
+  static void initialize(int64_t camera_id,
+                         CameraLinuxPlatformImageFormatGroup image_format,
+                         CameraLinuxCameraApiResponseHandle* response_handle,
+                         gpointer user_data);
+
+  static void dispose(int64_t camera_id,
+                      CameraLinuxCameraApiResponseHandle* response_handle,
+                      gpointer user_data);
+
+  static void get_texture_id(
+      int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle,
+
gpointer user_data);
+
+  static void take_picture(int64_t camera_id, const gchar* path,
+                           CameraLinuxCameraApiResponseHandle* response_handle,
+                           gpointer user_data);
+
+  static void start_video_recording(
+      int64_t camera_id, const gchar* path,
+      CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data);
+
+  static void stop_video_recording(
+      int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle,
+      gpointer user_data);
+
+  static void set_exposure_mode(
+      int64_t camera_id, CameraLinuxPlatformExposureMode mode,
+      CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data);
+
+  static void set_focus_mode(
+      int64_t camera_id, CameraLinuxPlatformFocusMode mode,
+      CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data);
+
+  static void camera_linux_camera_event_api_initialized_callback(
+      GObject* object, GAsyncResult* result, gpointer user_data);
+
+  static void set_image_format_group(
+      int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format_group,
+      CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data);
+};
+
+#endif  // CAMERA_HOST_PLUGIN_PRIVATE_H_
diff --git a/packages/camera/camera_linux/linux/camera_plugin.cpp b/packages/camera/camera_linux/linux/camera_plugin.cpp
new file mode 100644
index 000000000000..b486905c789e
--- /dev/null
+++ b/packages/camera/camera_linux/linux/camera_plugin.cpp
@@ -0,0 +1,8 @@
+#include "include/camera_linux/camera_plugin.h"
+
+#include "camera_host_plugin.h"
+
+// Flutter plugin entry point: constructs the host plugin, which registers
+// its method handlers with the engine's messenger.
+void camera_plugin_register_with_registrar(FlPluginRegistrar* registrar) {
+  // FIX: the original called g_object_unref() on the new CameraHostPlugin*.
+  // CameraHostPlugin is a plain C++ class, not a GObject, so that was
+  // undefined behavior — and conceptually it destroyed the plugin right
+  // after registration. The instance intentionally lives for the process
+  // lifetime.
+  new CameraHostPlugin(registrar);
+}
diff --git a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp
new file mode 100644
index 000000000000..5fbf1316568e
--- /dev/null
+++
b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp
@@ -0,0 +1,40 @@
+#include "camera_video_recorder_image_event_handler.h"
+
+CameraVideoRecorderImageEventHandler::CameraVideoRecorderImageEventHandler(
+    std::string videoFilePath)
+    : m_videoFilePath(std::move(videoFilePath)) {}
+
+// Appends each grabbed frame to the video file, lazily opening the writer
+// with the first frame's dimensions and pixel type.
+void CameraVideoRecorderImageEventHandler::OnImageGrabbed(
+    Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& ptr) {
+  if (!ptr->GrabSucceeded()) {
+    std::cerr << "Error: Grab failed or texture not ready." << std::endl;
+    return;
+  }
+
+  // FIX: the original used `static bool isFirstFrame`, which is shared by
+  // every handler instance in the process and never reset, so only the very
+  // first recording ever opened the writer. The writer's own open state is
+  // the correct per-instance "first frame" predicate.
+  if (!m_videoWriter.IsOpen()) {
+    m_videoWriter.SetParameter(ptr->GetWidth(), ptr->GetHeight(),
+                               ptr->GetPixelType(),
+                               CAMERA_VIDEO_RECORDER_PLAY_BACK_FRAME_RATE,
+                               CAMERA_VIDEO_RECORDER_QUALITY);
+    m_videoWriter.Open(m_videoFilePath.c_str());
+  }
+
+  Pylon::CPylonImage image;
+  image.AttachGrabResultBuffer(ptr);
+  m_videoWriter.Add(image);
+}
+
+void CameraVideoRecorderImageEventHandler::OnImageEventHandlerDeregistered(
+    Pylon::CInstantCamera& camera) {
+  if (m_videoWriter.IsOpen()) {
+    m_videoWriter.Close();
+  }
+}
+
+CameraVideoRecorderImageEventHandler::~CameraVideoRecorderImageEventHandler() {
+  if (m_videoWriter.IsOpen()) {
+    m_videoWriter.Close();
+  }
+}
diff --git a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h
new file mode 100644
index 000000000000..de95fb51d73f
--- /dev/null
+++ b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h
@@ -0,0 +1,34 @@
+
+#ifndef CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_
+#define CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_
+
+#include "flutter_linux/flutter_linux.h"
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Woverloaded-virtual"
+#pragma clang diagnostic ignored "-Wunused-variable"
+
+#include <pylon/PylonIncludes.h>  // NOTE(review): include target was stripped in this patch — confirm.
+
+#pragma clang diagnostic pop
+
+#define
CAMERA_VIDEO_RECORDER_PLAY_BACK_FRAME_RATE 60.0 +#define CAMERA_VIDEO_RECORDER_QUALITY 100 + +class CameraVideoRecorderImageEventHandler : public Pylon::CImageEventHandler { + Pylon::CVideoWriter m_videoWriter; + + public: + std::string m_videoFilePath; + + CameraVideoRecorderImageEventHandler(std::string videoFilePath); + + ~CameraVideoRecorderImageEventHandler() override; + + void OnImageGrabbed(Pylon::CInstantCamera& camera, + const Pylon::CGrabResultPtr& ptr) override; + + void OnImageEventHandlerDeregistered(Pylon::CInstantCamera& camera) override; +}; + +#endif // CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_ \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/capture_pipeline.cpp b/packages/camera/camera_linux/linux/capture_pipeline.cpp new file mode 100644 index 000000000000..23d09bcaea1c --- /dev/null +++ b/packages/camera/camera_linux/linux/capture_pipeline.cpp @@ -0,0 +1,579 @@ + +#include "capture_pipeline.h" + +#include + +#include + +#include "camera.h" + +CapturePipeline::CapturePipeline(const Camera& camera, + FlPluginRegistrar* registrar) + : camera{camera}, + m_fl_registrar(registrar), + m_fl_texture_registrar( + fl_plugin_registrar_get_texture_registrar(registrar)) {} + +CapturePipeline::~CapturePipeline() { + if (m_fl_texture) { + glDeleteTextures(1, &m_fl_texture_name); + fl_texture_registrar_unregister_texture(m_fl_texture_registrar, + FL_TEXTURE(m_fl_texture)); + g_object_unref(m_fl_texture); + } +} + +// GenApi::INodeMap& nodemap = camera->GetNodeMap(); +// Pylon::CEnumParameter(nodemap, "TriggerSelector").SetValue("FrameStart"); +// Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On"); +// Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software"); + +// // Manual grab loop with exposure bracketing +// cameraTextureImageEventHandler->OnImageEventHandlerRegistered(*camera); + +// camera->StartGrabbing(Pylon::GrabStrategy_OneByOne, +// Pylon::EGrabLoop::GrabLoop_ProvidedByUser); + +// 
std::thread([this]() { +// double shortExposure = 1000.0; // µs - initial value +// // double longExposure = 128000.0; // µs +// // const double gain = 0.6; +// // const double targetBrightness = 120.0; // target average +// // brightness + +// // const double overblownTargetRatio = 0.01; // 3% +// // const double overblownThreshold = 240.0; + +// auto& nodemap = camera->GetNodeMap(); +// // const double minExposure = +// // Pylon::CFloatParameter(nodemap, "ExposureTime").GetMin(); +// // const double maxExposure = +// // Pylon::CFloatParameter(nodemap, "ExposureTime").GetMax(); + +// while (camera->IsGrabbing()) { +// // --- Short exposure --- +// Pylon::CFloatParameter(nodemap, "ExposureTime") +// .TrySetValue(shortExposure); +// camera->WaitForFrameTriggerReady(5000, +// Pylon::TimeoutHandling_ThrowException); +// camera->ExecuteSoftwareTrigger(); + +// Pylon::CGrabResultPtr shortResult; +// camera->RetrieveResult(5000, shortResult, +// Pylon::TimeoutHandling_ThrowException); + +// // if (shortResult && shortResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // shortResult); + +// // // === Adjust short exposure for overblown % === +// // const int width = shortResult->GetWidth(); +// // const int height = shortResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(shortResult->GetBuffer()); + +// // const int cx = width / 2; +// // const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // size_t overblown = 0; +// // size_t total = 0; + +// // for (int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// // double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance >= overblownThreshold) { 
+// // overblown++; +// // } +// // total++; +// // } +// // } +// // } + +// // if (total > 0) { +// // double ratio = static_cast(overblown) / total; +// // double error = overblownTargetRatio - ratio; + +// // // Adjust short exposure proportionally +// // double proposed = +// // shortExposure * (1.0 + gain * error / +// // overblownTargetRatio); +// // shortExposure = +// // std::max(minExposure, std::min(maxExposure, proposed)); +// // } +// // } + +// // // --- Long exposure --- +// // Pylon::CFloatParameter(nodemap, +// // "ExposureTime").TrySetValue(longExposure); +// // camera->WaitForFrameTriggerReady(5000, +// // Pylon::TimeoutHandling_ThrowException); +// // camera->ExecuteSoftwareTrigger(); + +// // Pylon::CGrabResultPtr longResult; +// // camera->RetrieveResult(5000, longResult, +// // Pylon::TimeoutHandling_ThrowException); +// // if (longResult && longResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // longResult); + +// // // === Adjust long exposure brightness as before === +// // const int width = longResult->GetWidth(); +// // const int height = longResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(longResult->GetBuffer()); + +// // const int cx = width / 2; +// // const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // uint64_t sum = 0; +// // size_t count = 0; + +// // for (int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// // double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance > 10 && luminance < 240) { +// // sum += luminance; +// // count++; +// // } +// // } +// // } +// // } + +// // if (count > 0) { +// // double avgBrightness = static_cast(sum) 
/ count; +// // double error = targetBrightness - avgBrightness; +// // double proposed = +// // longExposure * (1.0 + gain * error / targetBrightness); +// // longExposure = std::max(minExposure, std::min(maxExposure, +// // proposed)); +// // } +// // } +// } +// }).detach(); +// } + +void CapturePipeline::StartGrabbing() { + if (!camera.camera) { + std::cerr << "Camera is not initialized." << std::endl; + return; + } + GenApi::INodeMap& nodemap = camera.camera->GetNodeMap(); + Pylon::CEnumParameter(nodemap, "TriggerSelector").SetValue("FrameStart"); + Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On"); + Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software"); + + camera.camera->StartGrabbing(Pylon::GrabStrategy_OneByOne, + Pylon::EGrabLoop::GrabLoop_ProvidedByUser); + + std::cout << "Starting camera grabbing..." << std::endl; + + std::thread([this]() { + GLInit(); + notifyTextureReady(); + + std::vector exposureLevels = {2000.0, 16000.0}; + size_t exposureIndex = 0; + GenApi::INodeMap& nodemap = camera.camera->GetNodeMap(); + + while (camera.camera->IsGrabbing()) { + // Set new exposure + double exposure = exposureLevels[exposureIndex]; + exposureIndex = (exposureIndex + 1) % exposureLevels.size(); + Pylon::CFloatParameter(nodemap, "ExposureTime").TrySetValue(exposure); + std::cout << "Set exposure to: " << exposure << "us" << std::endl; + + camera.camera->WaitForFrameTriggerReady(5000, + Pylon::TimeoutHandling_Return); + camera.camera->ExecuteSoftwareTrigger(); + Pylon::CGrabResultPtr grabResult; + if (!camera.camera->RetrieveResult(5000, grabResult, + Pylon::TimeoutHandling_Return)) { + continue; + } + std::cout << "image grabbed" << std::endl; + + if (!grabResult->GrabSucceeded()) { + std::cerr << "Error grabbing image: " + << grabResult->GetErrorDescription() << std::endl; + continue; + } + OnImageGrabbed(grabResult); + std::cout << "finish processing frame" << std::endl; + } + }).detach(); +} + +void 
CapturePipeline::notifyTextureReady() { + // Pass 'this' pointer to main thread callback + g_idle_add( + [](void* data) -> gboolean { + CapturePipeline* self = static_cast(data); + std::cout << "Texture is ready" << std::endl; + self->camera.emitTextureId(self->get_texture_id()); + return G_SOURCE_REMOVE; // remove source after running once + }, + this); +} + +void CapturePipeline::GLInit() { + FlView* fl_view = FL_VIEW(fl_plugin_registrar_get_view(m_fl_registrar)); + GdkWindow* window = gtk_widget_get_parent_window(GTK_WIDGET(fl_view)); + m_gl_context = gdk_window_create_gl_context(window, NULL); + gdk_gl_context_make_current(m_gl_context); + std::cout << "[DEBUG] Created and made current GL context." << std::endl; + + const int width = camera.width; + const int height = camera.height; + std::cout << "[DEBUG] Camera resolution: " << width << "x" << height + << std::endl; + + // 1. Create PBO ring buffer + m_ring_buffer_index = 0; + glGenBuffers(RING_BUFFER_SIZE, m_pbo_ring_buffer); + for (size_t i = 0; i < RING_BUFFER_SIZE; ++i) { + glBindBuffer(GL_PIXEL_PACK_BUFFER, m_pbo_ring_buffer[i]); + glBufferData(GL_PIXEL_PACK_BUFFER, width * height * 3, nullptr, + GL_STREAM_READ); + std::cout << "[DEBUG] Created PBO buffer ID: " << m_pbo_ring_buffer[i] + << std::endl; + } + glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); + + glGenTextures(RING_BUFFER_SIZE, m_exposure_textures); + for (int i = 0; i < RING_BUFFER_SIZE; ++i) { + glBindTexture(GL_TEXTURE_2D, m_exposure_textures[i]); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + } + glBindTexture(GL_TEXTURE_2D, 0); + + // 2. 
Create Motion Mask Texture + // glGenTextures(1, &m_motion_mask_texture); + // glBindTexture(GL_TEXTURE_2D, m_motion_mask_texture); + // glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, width, height, 0, GL_RED, + // GL_UNSIGNED_BYTE, nullptr); + // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + // std::cout << "[DEBUG] Created motion mask texture ID: " + // << m_motion_mask_texture << std::endl; + + // 3. Create HDR Fusion Shader Program + m_hdr_fusion_shader_program = createHDRShaderProgram(); + std::cout << "[DEBUG] Created HDR fusion shader program ID: " + << m_hdr_fusion_shader_program << std::endl; + + float quadVertices[] = { + // pos // tex + -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, -1.0f, 1.0f, 0.0f, + -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, + }; + + glGenVertexArrays(1, &m_hdr_fusion_vao); + glGenFramebuffers(1, &m_hdr_fusion_fbo); + glBindVertexArray(m_hdr_fusion_vao); + glBindBuffer(GL_ARRAY_BUFFER, m_hdr_fusion_vbo); + glBufferData(GL_ARRAY_BUFFER, sizeof(quadVertices), quadVertices, + GL_STATIC_DRAW); + + glEnableVertexAttribArray(0); + glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0); + glEnableVertexAttribArray(1); + glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), + (void*)(2 * sizeof(float))); + + glBindVertexArray(0); + std::cout << "[DEBUG] Created HDR fusion VAO: " << m_hdr_fusion_vao + << ", VBO: " << m_hdr_fusion_vbo << std::endl; + + // 4. Create Tone Mapping Shader Program + // TODO: Add debug print here when implemented + + // 5. 
Create Mono Shader Program + // m_mono_shader_program = createMonoShaderProgram(); + // std::cout << "[DEBUG] Created Mono shader program ID: " + // << m_mono_shader_program << std::endl; + + // float quadVerticesMono[] = { + // // pos // tex + // -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, -1.0f, 1.0f, 0.0f, + // -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, + // }; + + // glGenVertexArrays(1, &m_mono_vao); + // glGenBuffers(1, &m_mono_vbo); + // glBindVertexArray(m_mono_vao); + // glBindBuffer(GL_ARRAY_BUFFER, m_mono_vbo); + // glBufferData(GL_ARRAY_BUFFER, sizeof(quadVerticesMono), quadVerticesMono, + // GL_STATIC_DRAW); + + // glEnableVertexAttribArray(0); + // glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), + // (void*)0); glEnableVertexAttribArray(1); glVertexAttribPointer(1, 2, + // GL_FLOAT, GL_FALSE, 4 * sizeof(float), + // (void*)(2 * sizeof(float))); + + // glBindVertexArray(0); + // std::cout << "[DEBUG] Created Mono VAO: " << m_mono_vao + // << ", VBO: " << m_mono_vbo << std::endl; + + // 6. Create Output Texture + glGenTextures(1, &m_output_texture); + glBindTexture(GL_TEXTURE_2D, m_output_texture); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, + GL_UNSIGNED_BYTE, nullptr); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + std::cout << "[DEBUG] Created output texture ID: " << m_output_texture + << std::endl; + + // 7. 
Wrap output texture for Flutter + m_fl_texture = + fl_lightx_texture_gl_new(GL_TEXTURE_2D, m_output_texture, width, height); + m_fl_texture_name = m_output_texture; + fl_texture_registrar_register_texture(m_fl_texture_registrar, + FL_TEXTURE(m_fl_texture)); + fl_texture_registrar_mark_texture_frame_available(m_fl_texture_registrar, + FL_TEXTURE(m_fl_texture)); + std::cout << "[DEBUG] Registered and marked Flutter texture frame available " + "for texture ID: " + << m_output_texture << std::endl; +} + +void CapturePipeline::StopGrabbing() {} +void CapturePipeline::OnImageGrabbed(const Pylon::CGrabResultPtr& grabResult) { + if (!grabResult || !grabResult->GrabSucceeded()) { + std::cerr << "[DEBUG] Error grabbing image: " + << (grabResult ? grabResult->GetErrorDescription() : "No result") + << std::endl; + return; + } + + const int width = grabResult->GetWidth(); + const int height = grabResult->GetHeight(); + const uint8_t* data = static_cast(grabResult->GetBuffer()); + if (!data) { + std::cerr << "[DEBUG] No image data available." 
<< std::endl; + return; + } + + gdk_gl_context_make_current(m_gl_context); + + const int bufferIndex = m_ring_buffer_index; + const int nextIndex = (m_ring_buffer_index + 1) % RING_BUFFER_SIZE; + + GLuint pbo = m_pbo_ring_buffer[bufferIndex]; + GLuint texture = m_exposure_textures[bufferIndex]; + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo); + glBufferData(GL_PIXEL_UNPACK_BUFFER, width * height * 3, nullptr, + GL_STREAM_DRAW); + + void* ptr = glMapBufferRange(GL_PIXEL_UNPACK_BUFFER, 0, width * height * 3, + GL_MAP_WRITE_BIT | GL_MAP_INVALIDATE_BUFFER_BIT); + if (ptr) { + std::memcpy(ptr, data, width * height * 3); + glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER); + } else { + std::cerr << "[ERROR] Failed to map PBO" << std::endl; + } + + // Upload from PBO to texture (allocated only once elsewhere) + glBindTexture(GL_TEXTURE_2D, texture); + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGB, + GL_UNSIGNED_BYTE, nullptr); + glBindTexture(GL_TEXTURE_2D, 0); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0); + + std::cout << "[DEBUG] Uploaded image data to texture index " << bufferIndex + << std::endl; + + m_ring_buffer_index = nextIndex; + + // --- HDR Shader Pass --- + glBindFramebuffer(GL_FRAMEBUFFER, m_hdr_fusion_fbo); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, + m_output_texture, 0); + + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + std::cerr << "[ERROR] Framebuffer not complete." 
<< std::endl; + glBindFramebuffer(GL_FRAMEBUFFER, 0); + return; + } + + glViewport(0, 0, width, height); + glUseProgram(m_hdr_fusion_shader_program); + + const char* uniformNames[] = {"texLow", "texMidLow"}; + for (int i = 0; i < 2; ++i) { + glActiveTexture(GL_TEXTURE0 + i); + glBindTexture(GL_TEXTURE_2D, m_exposure_textures[i]); + GLint loc = + glGetUniformLocation(m_hdr_fusion_shader_program, uniformNames[i]); + glUniform1i(loc, i); + } + + glBindVertexArray(m_hdr_fusion_vao); + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + // Cleanup + glBindVertexArray(0); + for (int i = 0; i < 2; ++i) { + glActiveTexture(GL_TEXTURE0 + i); + glBindTexture(GL_TEXTURE_2D, 0); + } + glBindFramebuffer(GL_FRAMEBUFFER, 0); + glUseProgram(0); + + // Notify Flutter + glFlush(); + fl_texture_registrar_mark_texture_frame_available(m_fl_texture_registrar, + FL_TEXTURE(m_fl_texture)); +} + +int64_t CapturePipeline::get_texture_id() { + if (!m_fl_texture) { + std::cerr << "Texture is null" << std::endl; + return -1; + } + return fl_texture_get_id(FL_TEXTURE(m_fl_texture)); +} + +void CapturePipeline::OnNewFrame() {} + +GLuint CapturePipeline::compileShader(GLenum type, const char* src) { + GLuint shader = glCreateShader(type); + glShaderSource(shader, 1, &src, nullptr); + glCompileShader(shader); + GLint success; + glGetShaderiv(shader, GL_COMPILE_STATUS, &success); + if (!success) { + char log[512]; + glGetShaderInfoLog(shader, 512, nullptr, log); + std::cerr << "Shader compile error: " << log << std::endl; + } + return shader; +} + +GLuint CapturePipeline::createMonoShaderProgram() { + const char* vertexSrc = R"( + #version 300 es + precision mediump float; + layout (location = 0) in vec2 position; + layout (location = 1) in vec2 texCoord; + out vec2 TexCoords; + void main() { + TexCoords = texCoord; + gl_Position = vec4(position, 0.0, 1.0); + } + )"; + + const char* fragmentSrc = R"( + #version 300 es + precision mediump float; + in vec2 TexCoords; + out vec4 FragColor; + uniform sampler2D 
monoTexture;
    void main() {
      float gray = texture(monoTexture, TexCoords).r;
      FragColor = vec4(gray, gray, gray, 1.0); // convert mono to RGB
    }
  )";

  GLuint vs = compileShader(GL_VERTEX_SHADER, vertexSrc);
  GLuint fs = compileShader(GL_FRAGMENT_SHADER, fragmentSrc);

  // Link both stages into a program; the shader objects can be deleted once
  // attached and linked.
  GLuint program = glCreateProgram();
  glAttachShader(program, vs);
  glAttachShader(program, fs);
  glLinkProgram(program);

  GLint success;
  glGetProgramiv(program, GL_LINK_STATUS, &success);
  if (!success) {
    char log[512];
    glGetProgramInfoLog(program, 512, nullptr, log);
    std::cerr << "Shader program link error: " << log << std::endl;
  }

  glDeleteShader(vs);
  glDeleteShader(fs);

  return program;
}

// Builds the HDR fusion program used by OnImageGrabbed(): blends the short
// ("texLow") and long ("texMidLow") exposure textures with fixed weights.
GLuint CapturePipeline::createHDRShaderProgram() {
  const char* vertexSrc = R"(
    #version 300 es
    precision mediump float;
    layout (location = 0) in vec2 position;
    layout (location = 1) in vec2 texCoord;
    out vec2 TexCoords;
    void main() {
      TexCoords = texCoord;
      gl_Position = vec4(position, 0.0, 1.0);
    }
  )";

  const char* fragmentSrc = R"(
    #version 300 es
    precision mediump float;
    in vec2 TexCoords;
    out vec4 FragColor;

    uniform sampler2D texLow;
    uniform sampler2D texMidLow;

    void main() {
      vec3 colorLow = texture(texLow, TexCoords).rgb;
      vec3 colorMidLow = texture(texMidLow, TexCoords).rgb;

      // Simple exposure fusion strategy: weighted average (weights can be adjusted)
      float w1 = 0.2;
      float w2 = 0.8;

      vec3 hdr = (colorLow * w1 + colorMidLow * w2) / (w1 + w2);
      FragColor = vec4(hdr, 1.0);
    }
  )";

  GLuint vs = compileShader(GL_VERTEX_SHADER, vertexSrc);
  GLuint fs = compileShader(GL_FRAGMENT_SHADER, fragmentSrc);

  GLuint program = glCreateProgram();
  glAttachShader(program, vs);
  glAttachShader(program, fs);
  glLinkProgram(program);

  GLint success;
  glGetProgramiv(program, GL_LINK_STATUS, &success);
  if (!success) {
    char log[512];
    glGetProgramInfoLog(program, 512, nullptr, log);
    std::cerr << "Shader program
link error: " << log << std::endl; + } + + glDeleteShader(vs); + glDeleteShader(fs); + + return program; +} diff --git a/packages/camera/camera_linux/linux/capture_pipeline.h b/packages/camera/camera_linux/linux/capture_pipeline.h new file mode 100644 index 000000000000..07fcef61d642 --- /dev/null +++ b/packages/camera/camera_linux/linux/capture_pipeline.h @@ -0,0 +1,82 @@ + +#ifndef CAPTURE_PIPELINE_H_ +#define CAPTURE_PIPELINE_H_ + +#include + +#include + +#include "fl_lightx_texture_gl.h" +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Woverloaded-virtual" +#pragma clang diagnostic ignored "-Wunused-variable" + +#include + +#pragma clang diagnostic pop + +#include +#include + +#define RING_BUFFER_SIZE 2 + +class Camera; + +class CapturePipeline { + public: + CapturePipeline(const Camera& camera, FlPluginRegistrar* registrar); + ~CapturePipeline(); + + void StartGrabbing(); + void StopGrabbing(); + + int64_t get_texture_id(); + + private: + const Camera& camera; + + // FL Texture + FlLightxTextureGL* m_fl_texture; + unsigned int m_fl_texture_name; + FlPluginRegistrar* m_fl_registrar; + FlTextureRegistrar* m_fl_texture_registrar; + GdkGLContext* m_gl_context; + + // OpenGL resources + GLuint m_pbo_ring_buffer[RING_BUFFER_SIZE]; + GLuint m_exposure_textures[RING_BUFFER_SIZE] = {0}; + size_t m_ring_buffer_index; + + // motion mask texture + // GLuint m_motion_mask_texture; + + // hdr fusion GPU shader pass + GLuint m_hdr_fusion_shader_program; + GLuint m_hdr_fusion_vao, m_hdr_fusion_vbo; + GLuint m_hdr_fusion_fbo; + + // tone mapping GPU shader pass + // GLuint m_tone_mapping_shader_program; + // GLuint m_tone_mapping_vao, m_tone_mapping_vbo; + // GLuint m_tone_mapping_fbo; + + // mono texture GPU shader pass + // GLuint m_mono_shader_program; + // GLuint m_mono_fbo; + + // output texture + GLuint m_output_texture; + + void OnImageGrabbed(const Pylon::CGrabResultPtr& grabResult); 
+ void GLInit(); + void OnNewFrame(); + GLuint compileShader(GLenum type, const char* src); + GLuint createMonoShaderProgram(); + GLuint createHDRShaderProgram(); + void notifyTextureReady(); +}; + +#endif // CAPTURE_PIPELINE_H_ diff --git a/packages/camera/camera_linux/linux/fl_lightx_texture_gl.cpp b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.cpp new file mode 100644 index 000000000000..5340eb7c4cd4 --- /dev/null +++ b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.cpp @@ -0,0 +1,32 @@ +#include "fl_lightx_texture_gl.h" + +G_DEFINE_TYPE(FlLightxTextureGL, fl_lightx_texture_gl, fl_texture_gl_get_type()) + +static gboolean fl_lightx_texture_gl_populate(FlTextureGL* texture, + uint32_t* target, uint32_t* name, + uint32_t* width, uint32_t* height, + GError** error) { + FlLightxTextureGL* f = (FlLightxTextureGL*)texture; + *target = f->target; + *name = f->name; + *width = f->width; + *height = f->height; + return true; +} + +FlLightxTextureGL* fl_lightx_texture_gl_new(uint32_t target, uint32_t name, + uint32_t width, uint32_t height) { + auto r = FL_LIGHTX_TEXTURE_GL( + g_object_new(fl_lightx_texture_gl_get_type(), nullptr)); + r->target = target; + r->name = name; + r->width = width; + r->height = height; + return r; +} + +static void fl_lightx_texture_gl_class_init(FlLightxTextureGLClass* klass) { + FL_TEXTURE_GL_CLASS(klass)->populate = fl_lightx_texture_gl_populate; +} + +static void fl_lightx_texture_gl_init(FlLightxTextureGL* self) {} \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/fl_lightx_texture_gl.h b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.h new file mode 100644 index 000000000000..7585a2e7fcef --- /dev/null +++ b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.h @@ -0,0 +1,22 @@ + +#ifndef FL_LIGHTX_TEXTURE_GL_H_ +#define FL_LIGHTX_TEXTURE_GL_H_ + +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +G_DECLARE_FINAL_TYPE(FlLightxTextureGL, fl_lightx_texture_gl, FL, 
LIGHTX_TEXTURE_GL, FlTextureGL)

// Instance struct for the final GObject type declared above: wraps an
// existing GL texture so the Flutter engine can composite it.
struct _FlLightxTextureGL {
  FlTextureGL parent_instance;
  uint32_t target;  // GL texture target (e.g. GL_TEXTURE_2D)
  uint32_t name;    // GL texture object name (id)
  uint32_t width;   // texture width in pixels
  uint32_t height;  // texture height in pixels
};

// Creates a wrapper for an existing GL texture; ownership of the GL texture
// stays with the caller.
FlLightxTextureGL* fl_lightx_texture_gl_new(uint32_t target, uint32_t name,
                                            uint32_t width, uint32_t height);

#endif  // FL_LIGHTX_TEXTURE_GL_H_
\ No newline at end of file
diff --git a/packages/camera/camera_linux/linux/include/camera_linux/camera_plugin.h b/packages/camera/camera_linux/linux/include/camera_linux/camera_plugin.h
new file mode 100644
index 000000000000..8352ed3f3fa1
--- /dev/null
+++ b/packages/camera/camera_linux/linux/include/camera_linux/camera_plugin.h
@@ -0,0 +1,19 @@
#ifndef FLUTTER_PLUGIN_CAMERA_LINUX_PLUGIN_H_
#define FLUTTER_PLUGIN_CAMERA_LINUX_PLUGIN_H_

#include

G_BEGIN_DECLS

// Export the registration symbol with default visibility when building the
// plugin itself; consumers see a plain declaration.
#ifdef FLUTTER_PLUGIN_IMPL
#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default")))
#else
#define FLUTTER_PLUGIN_EXPORT
#endif

// Plugin entry point called by the Flutter Linux embedder.
FLUTTER_PLUGIN_EXPORT void camera_plugin_register_with_registrar(
    FlPluginRegistrar* registrar);

G_END_DECLS

#endif  // FLUTTER_PLUGIN_CAMERA_LINUX_PLUGIN_H_
diff --git a/packages/camera/camera_linux/linux/messages.g.cc b/packages/camera/camera_linux/linux/messages.g.cc
new file mode 100644
index 000000000000..ec5cfd5389ca
--- /dev/null
+++ b/packages/camera/camera_linux/linux/messages.g.cc
@@ -0,0 +1,1563 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Autogenerated from Pigeon (v22.7.4), do not edit directly.
+// See also: https://pub.dev/packages/pigeon + +#include "messages.g.h" + +struct _CameraLinuxPlatformSize { + GObject parent_instance; + + double width; + double height; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformSize, camera_linux_platform_size, G_TYPE_OBJECT) + +static void camera_linux_platform_size_dispose(GObject* object) { + G_OBJECT_CLASS(camera_linux_platform_size_parent_class)->dispose(object); +} + +static void camera_linux_platform_size_init(CameraLinuxPlatformSize* self) { +} + +static void camera_linux_platform_size_class_init(CameraLinuxPlatformSizeClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_size_dispose; +} + +CameraLinuxPlatformSize* camera_linux_platform_size_new(double width, double height) { + CameraLinuxPlatformSize* self = CAMERA_LINUX_PLATFORM_SIZE(g_object_new(camera_linux_platform_size_get_type(), nullptr)); + self->width = width; + self->height = height; + return self; +} + +double camera_linux_platform_size_get_width(CameraLinuxPlatformSize* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_SIZE(self), 0.0); + return self->width; +} + +double camera_linux_platform_size_get_height(CameraLinuxPlatformSize* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_SIZE(self), 0.0); + return self->height; +} + +static FlValue* camera_linux_platform_size_to_list(CameraLinuxPlatformSize* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_float(self->width)); + fl_value_append_take(values, fl_value_new_float(self->height)); + return values; +} + +static CameraLinuxPlatformSize* camera_linux_platform_size_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + double width = fl_value_get_float(value0); + FlValue* value1 = fl_value_get_list_value(values, 1); + double height = fl_value_get_float(value1); + return camera_linux_platform_size_new(width, height); +} + +struct _CameraLinuxPlatformCameraState { + GObject parent_instance; + + 
CameraLinuxPlatformSize* preview_size; + CameraLinuxPlatformExposureMode exposure_mode; + CameraLinuxPlatformFocusMode focus_mode; + gboolean exposure_point_supported; + gboolean focus_point_supported; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformCameraState, camera_linux_platform_camera_state, G_TYPE_OBJECT) + +static void camera_linux_platform_camera_state_dispose(GObject* object) { + CameraLinuxPlatformCameraState* self = CAMERA_LINUX_PLATFORM_CAMERA_STATE(object); + g_clear_object(&self->preview_size); + G_OBJECT_CLASS(camera_linux_platform_camera_state_parent_class)->dispose(object); +} + +static void camera_linux_platform_camera_state_init(CameraLinuxPlatformCameraState* self) { +} + +static void camera_linux_platform_camera_state_class_init(CameraLinuxPlatformCameraStateClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_camera_state_dispose; +} + +CameraLinuxPlatformCameraState* camera_linux_platform_camera_state_new(CameraLinuxPlatformSize* preview_size, CameraLinuxPlatformExposureMode exposure_mode, CameraLinuxPlatformFocusMode focus_mode, gboolean exposure_point_supported, gboolean focus_point_supported) { + CameraLinuxPlatformCameraState* self = CAMERA_LINUX_PLATFORM_CAMERA_STATE(g_object_new(camera_linux_platform_camera_state_get_type(), nullptr)); + self->preview_size = CAMERA_LINUX_PLATFORM_SIZE(g_object_ref(preview_size)); + self->exposure_mode = exposure_mode; + self->focus_mode = focus_mode; + self->exposure_point_supported = exposure_point_supported; + self->focus_point_supported = focus_point_supported; + return self; +} + +CameraLinuxPlatformSize* camera_linux_platform_camera_state_get_preview_size(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), nullptr); + return self->preview_size; +} + +CameraLinuxPlatformExposureMode camera_linux_platform_camera_state_get_exposure_mode(CameraLinuxPlatformCameraState* self) { + 
g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), static_cast(0)); + return self->exposure_mode; +} + +CameraLinuxPlatformFocusMode camera_linux_platform_camera_state_get_focus_mode(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), static_cast(0)); + return self->focus_mode; +} + +gboolean camera_linux_platform_camera_state_get_exposure_point_supported(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), FALSE); + return self->exposure_point_supported; +} + +gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), FALSE); + return self->focus_point_supported; +} + +static FlValue* camera_linux_platform_camera_state_to_list(CameraLinuxPlatformCameraState* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_custom_object(135, G_OBJECT(self->preview_size))); + fl_value_append_take(values, fl_value_new_custom(130, fl_value_new_int(self->exposure_mode), (GDestroyNotify)fl_value_unref)); + fl_value_append_take(values, fl_value_new_custom(132, fl_value_new_int(self->focus_mode), (GDestroyNotify)fl_value_unref)); + fl_value_append_take(values, fl_value_new_bool(self->exposure_point_supported)); + fl_value_append_take(values, fl_value_new_bool(self->focus_point_supported)); + return values; +} + +static CameraLinuxPlatformCameraState* camera_linux_platform_camera_state_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + CameraLinuxPlatformSize* preview_size = CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value0)); + FlValue* value1 = fl_value_get_list_value(values, 1); + CameraLinuxPlatformExposureMode exposure_mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + FlValue* value2 = 
fl_value_get_list_value(values, 2); + CameraLinuxPlatformFocusMode focus_mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value2))))); + FlValue* value3 = fl_value_get_list_value(values, 3); + gboolean exposure_point_supported = fl_value_get_bool(value3); + FlValue* value4 = fl_value_get_list_value(values, 4); + gboolean focus_point_supported = fl_value_get_bool(value4); + return camera_linux_platform_camera_state_new(preview_size, exposure_mode, focus_mode, exposure_point_supported, focus_point_supported); +} + +struct _CameraLinuxPlatformPoint { + GObject parent_instance; + + double x; + double y; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformPoint, camera_linux_platform_point, G_TYPE_OBJECT) + +static void camera_linux_platform_point_dispose(GObject* object) { + G_OBJECT_CLASS(camera_linux_platform_point_parent_class)->dispose(object); +} + +static void camera_linux_platform_point_init(CameraLinuxPlatformPoint* self) { +} + +static void camera_linux_platform_point_class_init(CameraLinuxPlatformPointClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_point_dispose; +} + +CameraLinuxPlatformPoint* camera_linux_platform_point_new(double x, double y) { + CameraLinuxPlatformPoint* self = CAMERA_LINUX_PLATFORM_POINT(g_object_new(camera_linux_platform_point_get_type(), nullptr)); + self->x = x; + self->y = y; + return self; +} + +double camera_linux_platform_point_get_x(CameraLinuxPlatformPoint* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_POINT(self), 0.0); + return self->x; +} + +double camera_linux_platform_point_get_y(CameraLinuxPlatformPoint* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_POINT(self), 0.0); + return self->y; +} + +static FlValue* camera_linux_platform_point_to_list(CameraLinuxPlatformPoint* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_float(self->x)); + fl_value_append_take(values, fl_value_new_float(self->y)); + return 
values; +} + +static CameraLinuxPlatformPoint* camera_linux_platform_point_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + double x = fl_value_get_float(value0); + FlValue* value1 = fl_value_get_list_value(values, 1); + double y = fl_value_get_float(value1); + return camera_linux_platform_point_new(x, y); +} + +struct _CameraLinuxMessageCodec { + FlStandardMessageCodec parent_instance; + +}; + +G_DEFINE_TYPE(CameraLinuxMessageCodec, camera_linux_message_codec, fl_standard_message_codec_get_type()) + +static gboolean camera_linux_message_codec_write_camera_linux_platform_device_orientation(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 129; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_exposure_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 130; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_flash_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 131; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_focus_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 132; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, 
GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 133; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 134; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_size(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformSize* value, GError** error) { + uint8_t type = 135; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_size_to_list(value); + return fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformCameraState* value, GError** error) { + uint8_t type = 136; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_camera_state_to_list(value); + return fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_point(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformPoint* value, GError** error) { + uint8_t type = 137; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_point_to_list(value); + return fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_value(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + if (fl_value_get_type(value) == 
FL_VALUE_TYPE_CUSTOM) { + switch (fl_value_get_custom_type(value)) { + case 129: + return camera_linux_message_codec_write_camera_linux_platform_device_orientation(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 130: + return camera_linux_message_codec_write_camera_linux_platform_exposure_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 131: + return camera_linux_message_codec_write_camera_linux_platform_flash_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 132: + return camera_linux_message_codec_write_camera_linux_platform_focus_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 133: + return camera_linux_message_codec_write_camera_linux_platform_image_format_group(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 134: + return camera_linux_message_codec_write_camera_linux_platform_resolution_preset(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 135: + return camera_linux_message_codec_write_camera_linux_platform_size(codec, buffer, CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value)), error); + case 136: + return camera_linux_message_codec_write_camera_linux_platform_camera_state(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_STATE(fl_value_get_custom_value_object(value)), error); + case 137: + return camera_linux_message_codec_write_camera_linux_platform_point(codec, buffer, CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value)), error); + } + } + + return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->write_value(codec, buffer, value, error); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_device_orientation(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return 
fl_value_new_custom(129, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref);
}

// Enum wrappers (custom type ids 130-134): the payload is decoded with the
// standard codec and wrapped; the wrapper owns one reference, released by
// fl_value_unref when the custom value is destroyed.
static FlValue* camera_linux_message_codec_read_camera_linux_platform_exposure_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  return fl_value_new_custom(130, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref);
}

static FlValue* camera_linux_message_codec_read_camera_linux_platform_flash_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  return fl_value_new_custom(131, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref);
}

static FlValue* camera_linux_message_codec_read_camera_linux_platform_focus_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  return fl_value_new_custom(132, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref);
}

static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  return fl_value_new_custom(133, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref);
}

static FlValue* camera_linux_message_codec_read_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  return fl_value_new_custom(134, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref);
}

// Struct wrappers (custom type ids 135-137): decode the serialized field list,
// rebuild the typed object from it, and hand the object to the custom value.
// On malformed data, sets a codec error and returns nullptr.
static FlValue* camera_linux_message_codec_read_camera_linux_platform_size(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error);
  if (values == nullptr) {
    return nullptr;
  }

  g_autoptr(CameraLinuxPlatformSize) value = camera_linux_platform_size_new_from_list(values);
  if (value == nullptr) {
    g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData");
    return nullptr;
  }

  return fl_value_new_custom_object(135, G_OBJECT(value));
}

static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error);
  if (values == nullptr) {
    return nullptr;
  }

  g_autoptr(CameraLinuxPlatformCameraState) value = camera_linux_platform_camera_state_new_from_list(values);
  if (value == nullptr) {
    g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData");
    return nullptr;
  }

  return fl_value_new_custom_object(136, G_OBJECT(value));
}

static FlValue* camera_linux_message_codec_read_camera_linux_platform_point(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) {
  g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error);
  if (values == nullptr) {
    return nullptr;
  }

  g_autoptr(CameraLinuxPlatformPoint) value = camera_linux_platform_point_new_from_list(values);
  if (value == nullptr) {
    g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData");
    return nullptr;
  }

  return fl_value_new_custom_object(137, G_OBJECT(value));
}

// Dispatches custom type ids 129-137 to the readers above; any other type falls
// through to the standard message codec implementation.
static FlValue* camera_linux_message_codec_read_value_of_type(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, int type, GError** error) {
  switch (type) {
    case 129:
      return camera_linux_message_codec_read_camera_linux_platform_device_orientation(codec, buffer, offset, error);
    case 130:
      return camera_linux_message_codec_read_camera_linux_platform_exposure_mode(codec, buffer, offset, error);
    case 131:
      return camera_linux_message_codec_read_camera_linux_platform_flash_mode(codec, buffer, offset, error);
    case 132:
      return camera_linux_message_codec_read_camera_linux_platform_focus_mode(codec, buffer, offset, error);
    case 133:
      return camera_linux_message_codec_read_camera_linux_platform_image_format_group(codec, buffer, offset, error);
    case 134:
      return camera_linux_message_codec_read_camera_linux_platform_resolution_preset(codec, buffer, offset, error);
    case 135:
      return camera_linux_message_codec_read_camera_linux_platform_size(codec, buffer, offset, error);
    case 136:
      return camera_linux_message_codec_read_camera_linux_platform_camera_state(codec, buffer, offset, error);
    case 137:
      return camera_linux_message_codec_read_camera_linux_platform_point(codec, buffer, offset, error);
    default:
      return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->read_value_of_type(codec, buffer, offset, type, error);
  }
}

static void camera_linux_message_codec_init(CameraLinuxMessageCodec* self) {
}

// Hooks the custom read/write paths into the standard message codec vtable.
static void camera_linux_message_codec_class_init(CameraLinuxMessageCodecClass* klass) {
  FL_STANDARD_MESSAGE_CODEC_CLASS(klass)->write_value = camera_linux_message_codec_write_value;
  FL_STANDARD_MESSAGE_CODEC_CLASS(klass)->read_value_of_type = camera_linux_message_codec_read_value_of_type;
}

static CameraLinuxMessageCodec* camera_linux_message_codec_new() {
  CameraLinuxMessageCodec* self = CAMERA_LINUX_MESSAGE_CODEC(g_object_new(camera_linux_message_codec_get_type(), nullptr));
  return self;
}

// Pairs a message channel with a pending response handle so a host-API
// implementation can reply to a call asynchronously.
struct _CameraLinuxCameraApiResponseHandle {
  GObject parent_instance;

  FlBasicMessageChannel* channel;
  FlBasicMessageChannelResponseHandle* response_handle;
};

G_DEFINE_TYPE(CameraLinuxCameraApiResponseHandle, camera_linux_camera_api_response_handle, G_TYPE_OBJECT)

static void
camera_linux_camera_api_response_handle_dispose(GObject* object) {
  CameraLinuxCameraApiResponseHandle* self = CAMERA_LINUX_CAMERA_API_RESPONSE_HANDLE(object);
  // Drops the references taken in camera_linux_camera_api_response_handle_new.
  g_clear_object(&self->channel);
  g_clear_object(&self->response_handle);
  G_OBJECT_CLASS(camera_linux_camera_api_response_handle_parent_class)->dispose(object);
}

static void camera_linux_camera_api_response_handle_init(CameraLinuxCameraApiResponseHandle* self) {
}

static void camera_linux_camera_api_response_handle_class_init(CameraLinuxCameraApiResponseHandleClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_response_handle_dispose;
}

// Takes its own reference on both the channel and the response handle; both are
// released in dispose.
static CameraLinuxCameraApiResponseHandle* camera_linux_camera_api_response_handle_new(FlBasicMessageChannel* channel, FlBasicMessageChannelResponseHandle* response_handle) {
  CameraLinuxCameraApiResponseHandle* self = CAMERA_LINUX_CAMERA_API_RESPONSE_HANDLE(g_object_new(camera_linux_camera_api_response_handle_get_type(), nullptr));
  self->channel = FL_BASIC_MESSAGE_CHANNEL(g_object_ref(channel));
  self->response_handle = FL_BASIC_MESSAGE_CHANNEL_RESPONSE_HANDLE(g_object_ref(response_handle));
  return self;
}

// Response wrapper for CameraApi.getAvailableCamerasNames(). `value` is the wire
// envelope: a one-element list [result] on success, or [code, message, details]
// on error.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetAvailableCamerasNamesResponse, camera_linux_camera_api_get_available_cameras_names_response, CAMERA_LINUX, CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE, GObject)

struct _CameraLinuxCameraApiGetAvailableCamerasNamesResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiGetAvailableCamerasNamesResponse, camera_linux_camera_api_get_available_cameras_names_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_get_available_cameras_names_response_dispose(GObject* object) {
  CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);

  G_OBJECT_CLASS(camera_linux_camera_api_get_available_cameras_names_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_get_available_cameras_names_response_init(CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self) {
}

static void camera_linux_camera_api_get_available_cameras_names_response_class_init(CameraLinuxCameraApiGetAvailableCamerasNamesResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_available_cameras_names_response_dispose;
}

// Success envelope. A fresh reference on return_value is appended (ref + take),
// so the caller keeps its own reference.
static CameraLinuxCameraApiGetAvailableCamerasNamesResponse* camera_linux_camera_api_get_available_cameras_names_response_new(FlValue* return_value) {
  CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_names_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_ref(return_value));
  return self;
}

// Error envelope: [code, message, details]; message defaults to "" and details
// to null when not provided.
static CameraLinuxCameraApiGetAvailableCamerasNamesResponse* camera_linux_camera_api_get_available_cameras_names_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_names_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.create(); success payload is the camera id.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiCreateResponse, camera_linux_camera_api_create_response, CAMERA_LINUX, CAMERA_API_CREATE_RESPONSE, GObject)

struct _CameraLinuxCameraApiCreateResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiCreateResponse, camera_linux_camera_api_create_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_create_response_dispose(GObject* object) {
  CameraLinuxCameraApiCreateResponse* self = CAMERA_LINUX_CAMERA_API_CREATE_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_create_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_create_response_init(CameraLinuxCameraApiCreateResponse* self) {
}

static void camera_linux_camera_api_create_response_class_init(CameraLinuxCameraApiCreateResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_create_response_dispose;
}

static CameraLinuxCameraApiCreateResponse* camera_linux_camera_api_create_response_new(int64_t return_value) {
  CameraLinuxCameraApiCreateResponse* self = CAMERA_LINUX_CAMERA_API_CREATE_RESPONSE(g_object_new(camera_linux_camera_api_create_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_int(return_value));
  return self;
}

static CameraLinuxCameraApiCreateResponse* camera_linux_camera_api_create_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiCreateResponse* self = CAMERA_LINUX_CAMERA_API_CREATE_RESPONSE(g_object_new(camera_linux_camera_api_create_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ?
message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.initialize(); the success payload is null
// (void method), errors carry [code, message, details].
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiInitializeResponse, camera_linux_camera_api_initialize_response, CAMERA_LINUX, CAMERA_API_INITIALIZE_RESPONSE, GObject)

struct _CameraLinuxCameraApiInitializeResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiInitializeResponse, camera_linux_camera_api_initialize_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_initialize_response_dispose(GObject* object) {
  CameraLinuxCameraApiInitializeResponse* self = CAMERA_LINUX_CAMERA_API_INITIALIZE_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_initialize_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_initialize_response_init(CameraLinuxCameraApiInitializeResponse* self) {
}

static void camera_linux_camera_api_initialize_response_class_init(CameraLinuxCameraApiInitializeResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_initialize_response_dispose;
}

static CameraLinuxCameraApiInitializeResponse* camera_linux_camera_api_initialize_response_new() {
  CameraLinuxCameraApiInitializeResponse* self = CAMERA_LINUX_CAMERA_API_INITIALIZE_RESPONSE(g_object_new(camera_linux_camera_api_initialize_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiInitializeResponse* camera_linux_camera_api_initialize_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiInitializeResponse* self = CAMERA_LINUX_CAMERA_API_INITIALIZE_RESPONSE(g_object_new(camera_linux_camera_api_initialize_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.getTextureId(); success payload is the texture
// id, or null when the nullable return value is absent.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetTextureIdResponse, camera_linux_camera_api_get_texture_id_response, CAMERA_LINUX, CAMERA_API_GET_TEXTURE_ID_RESPONSE, GObject)

struct _CameraLinuxCameraApiGetTextureIdResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiGetTextureIdResponse, camera_linux_camera_api_get_texture_id_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_get_texture_id_response_dispose(GObject* object) {
  CameraLinuxCameraApiGetTextureIdResponse* self = CAMERA_LINUX_CAMERA_API_GET_TEXTURE_ID_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_get_texture_id_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_get_texture_id_response_init(CameraLinuxCameraApiGetTextureIdResponse* self) {
}

static void camera_linux_camera_api_get_texture_id_response_class_init(CameraLinuxCameraApiGetTextureIdResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_texture_id_response_dispose;
}

// return_value is a nullable int64_t; nullptr maps to a null wire value.
static CameraLinuxCameraApiGetTextureIdResponse* camera_linux_camera_api_get_texture_id_response_new(int64_t* return_value) {
  CameraLinuxCameraApiGetTextureIdResponse* self = CAMERA_LINUX_CAMERA_API_GET_TEXTURE_ID_RESPONSE(g_object_new(camera_linux_camera_api_get_texture_id_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, return_value != nullptr ? fl_value_new_int(*return_value) : fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiGetTextureIdResponse* camera_linux_camera_api_get_texture_id_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiGetTextureIdResponse* self = CAMERA_LINUX_CAMERA_API_GET_TEXTURE_ID_RESPONSE(g_object_new(camera_linux_camera_api_get_texture_id_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.dispose(); void method, success payload null.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiDisposeResponse, camera_linux_camera_api_dispose_response, CAMERA_LINUX, CAMERA_API_DISPOSE_RESPONSE, GObject)

struct _CameraLinuxCameraApiDisposeResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiDisposeResponse, camera_linux_camera_api_dispose_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_dispose_response_dispose(GObject* object) {
  CameraLinuxCameraApiDisposeResponse* self = CAMERA_LINUX_CAMERA_API_DISPOSE_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_dispose_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_dispose_response_init(CameraLinuxCameraApiDisposeResponse* self) {
}

static void camera_linux_camera_api_dispose_response_class_init(CameraLinuxCameraApiDisposeResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_dispose_response_dispose;
}

static CameraLinuxCameraApiDisposeResponse* camera_linux_camera_api_dispose_response_new() {
  CameraLinuxCameraApiDisposeResponse* self = CAMERA_LINUX_CAMERA_API_DISPOSE_RESPONSE(g_object_new(camera_linux_camera_api_dispose_response_get_type(),
nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiDisposeResponse* camera_linux_camera_api_dispose_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiDisposeResponse* self = CAMERA_LINUX_CAMERA_API_DISPOSE_RESPONSE(g_object_new(camera_linux_camera_api_dispose_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.takePicture(); void method, success payload
// null, errors carry [code, message, details].
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiTakePictureResponse, camera_linux_camera_api_take_picture_response, CAMERA_LINUX, CAMERA_API_TAKE_PICTURE_RESPONSE, GObject)

struct _CameraLinuxCameraApiTakePictureResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiTakePictureResponse, camera_linux_camera_api_take_picture_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_take_picture_response_dispose(GObject* object) {
  CameraLinuxCameraApiTakePictureResponse* self = CAMERA_LINUX_CAMERA_API_TAKE_PICTURE_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_take_picture_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_take_picture_response_init(CameraLinuxCameraApiTakePictureResponse* self) {
}

static void camera_linux_camera_api_take_picture_response_class_init(CameraLinuxCameraApiTakePictureResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_take_picture_response_dispose;
}

static CameraLinuxCameraApiTakePictureResponse* camera_linux_camera_api_take_picture_response_new() {
  CameraLinuxCameraApiTakePictureResponse* self = CAMERA_LINUX_CAMERA_API_TAKE_PICTURE_RESPONSE(g_object_new(camera_linux_camera_api_take_picture_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiTakePictureResponse* camera_linux_camera_api_take_picture_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiTakePictureResponse* self = CAMERA_LINUX_CAMERA_API_TAKE_PICTURE_RESPONSE(g_object_new(camera_linux_camera_api_take_picture_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.startVideoRecording(); void method.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStartVideoRecordingResponse, camera_linux_camera_api_start_video_recording_response, CAMERA_LINUX, CAMERA_API_START_VIDEO_RECORDING_RESPONSE, GObject)

struct _CameraLinuxCameraApiStartVideoRecordingResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiStartVideoRecordingResponse, camera_linux_camera_api_start_video_recording_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_start_video_recording_response_dispose(GObject* object) {
  CameraLinuxCameraApiStartVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_START_VIDEO_RECORDING_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_start_video_recording_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_start_video_recording_response_init(CameraLinuxCameraApiStartVideoRecordingResponse* self) {
}

static void camera_linux_camera_api_start_video_recording_response_class_init(CameraLinuxCameraApiStartVideoRecordingResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_start_video_recording_response_dispose;
}

static CameraLinuxCameraApiStartVideoRecordingResponse* camera_linux_camera_api_start_video_recording_response_new() {
  CameraLinuxCameraApiStartVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_START_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_start_video_recording_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiStartVideoRecordingResponse* camera_linux_camera_api_start_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiStartVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_START_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_start_video_recording_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ?
fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.stopVideoRecording(); success payload is the
// recorded file path.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStopVideoRecordingResponse, camera_linux_camera_api_stop_video_recording_response, CAMERA_LINUX, CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE, GObject)

struct _CameraLinuxCameraApiStopVideoRecordingResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiStopVideoRecordingResponse, camera_linux_camera_api_stop_video_recording_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_stop_video_recording_response_dispose(GObject* object) {
  CameraLinuxCameraApiStopVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_stop_video_recording_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_stop_video_recording_response_init(CameraLinuxCameraApiStopVideoRecordingResponse* self) {
}

static void camera_linux_camera_api_stop_video_recording_response_class_init(CameraLinuxCameraApiStopVideoRecordingResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_stop_video_recording_response_dispose;
}

// NOTE(review): return_value is passed straight to fl_value_new_string with no
// null guard — presumably callers always supply a non-null path; confirm.
static CameraLinuxCameraApiStopVideoRecordingResponse* camera_linux_camera_api_stop_video_recording_response_new(const gchar* return_value) {
  CameraLinuxCameraApiStopVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_stop_video_recording_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(return_value));
  return self;
}

static CameraLinuxCameraApiStopVideoRecordingResponse* camera_linux_camera_api_stop_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiStopVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_stop_video_recording_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.setExposureMode(); void method.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_MODE_RESPONSE, GObject)

struct _CameraLinuxCameraApiSetExposureModeResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_set_exposure_mode_response_dispose(GObject* object) {
  CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_mode_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_set_exposure_mode_response_init(CameraLinuxCameraApiSetExposureModeResponse* self) {
}

static void camera_linux_camera_api_set_exposure_mode_response_class_init(CameraLinuxCameraApiSetExposureModeResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_mode_response_dispose;
}

static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new() {
  CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.setFocusMode(); void method.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, CAMERA_LINUX, CAMERA_API_SET_FOCUS_MODE_RESPONSE, GObject)

struct _CameraLinuxCameraApiSetFocusModeResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_set_focus_mode_response_dispose(GObject* object) {
  CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_set_focus_mode_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_set_focus_mode_response_init(CameraLinuxCameraApiSetFocusModeResponse* self) {
}

static void camera_linux_camera_api_set_focus_mode_response_class_init(CameraLinuxCameraApiSetFocusModeResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_focus_mode_response_dispose;
}

static CameraLinuxCameraApiSetFocusModeResponse* camera_linux_camera_api_set_focus_mode_response_new() {
  CameraLinuxCameraApiSetFocusModeResponse* self =
CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiSetFocusModeResponse* camera_linux_camera_api_set_focus_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Response wrapper for CameraApi.setImageFormatGroup(); void method.
G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, CAMERA_LINUX, CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE, GObject)

struct _CameraLinuxCameraApiSetImageFormatGroupResponse {
  GObject parent_instance;

  FlValue* value;
};

G_DEFINE_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, G_TYPE_OBJECT)

static void camera_linux_camera_api_set_image_format_group_response_dispose(GObject* object) {
  CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(object);
  g_clear_pointer(&self->value, fl_value_unref);
  G_OBJECT_CLASS(camera_linux_camera_api_set_image_format_group_response_parent_class)->dispose(object);
}

static void camera_linux_camera_api_set_image_format_group_response_init(CameraLinuxCameraApiSetImageFormatGroupResponse* self) {
}

static void camera_linux_camera_api_set_image_format_group_response_class_init(CameraLinuxCameraApiSetImageFormatGroupResponseClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_image_format_group_response_dispose;
}

static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new() {
  CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_null());
  return self;
}

static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new_error(const gchar* code, const gchar* message, FlValue* details) {
  CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr));
  self->value = fl_value_new_list();
  fl_value_append_take(self->value, fl_value_new_string(code));
  fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : ""));
  fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null());
  return self;
}

// Host-side CameraApi dispatcher: holds the user-supplied vtable of handlers
// plus opaque user data and its destructor.
struct _CameraLinuxCameraApi {
  GObject parent_instance;

  const CameraLinuxCameraApiVTable* vtable;
  gpointer user_data;
  GDestroyNotify user_data_free_func;
};

G_DEFINE_TYPE(CameraLinuxCameraApi, camera_linux_camera_api, G_TYPE_OBJECT)

static void camera_linux_camera_api_dispose(GObject* object) {
  CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(object);
  // NOTE(review): user_data_free_func is invoked without a null check; callers
  // must supply a valid free function whenever user_data is non-null.
  if (self->user_data != nullptr) {
    self->user_data_free_func(self->user_data);
  }
  self->user_data = nullptr;
  G_OBJECT_CLASS(camera_linux_camera_api_parent_class)->dispose(object);
}

static void camera_linux_camera_api_init(CameraLinuxCameraApi* self) {
}

static void camera_linux_camera_api_class_init(CameraLinuxCameraApiClass* klass) {
  G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_dispose;
}

// The vtable pointer is borrowed (not copied); user_data ownership transfers to
// the new object and is released with user_data_free_func in dispose.
static CameraLinuxCameraApi* camera_linux_camera_api_new(const CameraLinuxCameraApiVTable* vtable, gpointer user_data, GDestroyNotify user_data_free_func) {
  CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(g_object_new(camera_linux_camera_api_get_type(), nullptr));
  self->vtable = vtable;
  self->user_data = user_data;
  self->user_data_free_func = user_data_free_func;
  return self;
}

// Channel callback for CameraApi.getAvailableCamerasNames(): no arguments are
// decoded; the call is dropped silently when no handler is registered.
static void camera_linux_camera_api_get_available_cameras_names_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) {
  CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data);

  if (self->vtable == nullptr || self->vtable->get_available_cameras_names == nullptr) {
    return;
  }

  g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle);
  self->vtable->get_available_cameras_names(handle, self->user_data);
}

static void camera_linux_camera_api_create_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle,
gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->create == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + const gchar* camera_name = fl_value_get_string(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformResolutionPreset resolution_preset = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->create(camera_name, resolution_preset, handle, self->user_data); +} + +static void camera_linux_camera_api_initialize_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->initialize == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformImageFormatGroup image_format = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->initialize(camera_id, image_format, handle, self->user_data); +} + +static void camera_linux_camera_api_get_texture_id_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->get_texture_id == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = 
fl_value_get_int(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->get_texture_id(camera_id, handle, self->user_data); +} + +static void camera_linux_camera_api_dispose_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->dispose == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->dispose(camera_id, handle, self->user_data); +} + +static void camera_linux_camera_api_take_picture_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->take_picture == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + const gchar* path = fl_value_get_string(value1); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->take_picture(camera_id, path, handle, self->user_data); +} + +static void camera_linux_camera_api_start_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->start_video_recording == nullptr) { + return; + } + + FlValue* value0 = 
fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + const gchar* path = fl_value_get_string(value1); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->start_video_recording(camera_id, path, handle, self->user_data); +} + +static void camera_linux_camera_api_stop_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->stop_video_recording == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->stop_video_recording(camera_id, handle, self->user_data); +} + +static void camera_linux_camera_api_set_exposure_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_exposure_mode == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformExposureMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_exposure_mode(camera_id, mode, handle, self->user_data); +} + +static void 
camera_linux_camera_api_set_focus_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_focus_mode == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformFocusMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_focus_mode(camera_id, mode, handle, self->user_data); +} + +static void camera_linux_camera_api_set_image_format_group_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_image_format_group == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformImageFormatGroup image_format_group = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_image_format_group(camera_id, image_format_group, handle, self->user_data); +} + +void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix, const CameraLinuxCameraApiVTable* vtable, gpointer user_data, GDestroyNotify user_data_free_func) { + g_autofree gchar* dot_suffix = suffix != nullptr ? 
g_strdup_printf(".%s", suffix) : g_strdup(""); + g_autoptr(CameraLinuxCameraApi) api_data = camera_linux_camera_api_new(vtable, user_data, user_data_free_func); + + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + g_autofree gchar* get_available_cameras_names_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCamerasNames%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_available_cameras_names_channel = fl_basic_message_channel_new(messenger, get_available_cameras_names_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_available_cameras_names_channel, camera_linux_camera_api_get_available_cameras_names_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* create_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.create%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) create_channel = fl_basic_message_channel_new(messenger, create_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(create_channel, camera_linux_camera_api_create_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* initialize_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.initialize%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) initialize_channel = fl_basic_message_channel_new(messenger, initialize_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(initialize_channel, camera_linux_camera_api_initialize_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* get_texture_id_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getTextureId%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_texture_id_channel = fl_basic_message_channel_new(messenger, get_texture_id_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_texture_id_channel, 
camera_linux_camera_api_get_texture_id_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* dispose_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.dispose%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) dispose_channel = fl_basic_message_channel_new(messenger, dispose_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(dispose_channel, camera_linux_camera_api_dispose_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* take_picture_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.takePicture%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) take_picture_channel = fl_basic_message_channel_new(messenger, take_picture_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(take_picture_channel, camera_linux_camera_api_take_picture_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* start_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) start_video_recording_channel = fl_basic_message_channel_new(messenger, start_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(start_video_recording_channel, camera_linux_camera_api_start_video_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* stop_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) stop_video_recording_channel = fl_basic_message_channel_new(messenger, stop_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(stop_video_recording_channel, camera_linux_camera_api_stop_video_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_exposure_mode_channel_name = 
g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_mode_channel = fl_basic_message_channel_new(messenger, set_exposure_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_mode_channel, camera_linux_camera_api_set_exposure_mode_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_focus_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_focus_mode_channel = fl_basic_message_channel_new(messenger, set_focus_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_focus_mode_channel, camera_linux_camera_api_set_focus_mode_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_image_format_group_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_image_format_group_channel = fl_basic_message_channel_new(messenger, set_image_format_group_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_image_format_group_channel, camera_linux_camera_api_set_image_format_group_cb, g_object_ref(api_data), g_object_unref); +} + +void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix) { + g_autofree gchar* dot_suffix = suffix != nullptr ? 
g_strdup_printf(".%s", suffix) : g_strdup(""); + + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + g_autofree gchar* get_available_cameras_names_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCamerasNames%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_available_cameras_names_channel = fl_basic_message_channel_new(messenger, get_available_cameras_names_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_available_cameras_names_channel, nullptr, nullptr, nullptr); + g_autofree gchar* create_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.create%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) create_channel = fl_basic_message_channel_new(messenger, create_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(create_channel, nullptr, nullptr, nullptr); + g_autofree gchar* initialize_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.initialize%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) initialize_channel = fl_basic_message_channel_new(messenger, initialize_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(initialize_channel, nullptr, nullptr, nullptr); + g_autofree gchar* get_texture_id_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getTextureId%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_texture_id_channel = fl_basic_message_channel_new(messenger, get_texture_id_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_texture_id_channel, nullptr, nullptr, nullptr); + g_autofree gchar* dispose_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.dispose%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) dispose_channel = fl_basic_message_channel_new(messenger, dispose_channel_name, FL_MESSAGE_CODEC(codec)); + 
fl_basic_message_channel_set_message_handler(dispose_channel, nullptr, nullptr, nullptr); + g_autofree gchar* take_picture_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.takePicture%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) take_picture_channel = fl_basic_message_channel_new(messenger, take_picture_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(take_picture_channel, nullptr, nullptr, nullptr); + g_autofree gchar* start_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) start_video_recording_channel = fl_basic_message_channel_new(messenger, start_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(start_video_recording_channel, nullptr, nullptr, nullptr); + g_autofree gchar* stop_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) stop_video_recording_channel = fl_basic_message_channel_new(messenger, stop_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(stop_video_recording_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_exposure_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_mode_channel = fl_basic_message_channel_new(messenger, set_exposure_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_mode_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_focus_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_focus_mode_channel = fl_basic_message_channel_new(messenger, 
set_focus_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_focus_mode_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_image_format_group_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_image_format_group_channel = fl_basic_message_channel_new(messenger, set_image_format_group_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_image_format_group_channel, nullptr, nullptr, nullptr); +} + +void camera_linux_camera_api_respond_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value) { + g_autoptr(CameraLinuxCameraApiGetAvailableCamerasNamesResponse) response = camera_linux_camera_api_get_available_cameras_names_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCamerasNames", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetAvailableCamerasNamesResponse) response = camera_linux_camera_api_get_available_cameras_names_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCamerasNames", error->message); + } +} + +void camera_linux_camera_api_respond_create(CameraLinuxCameraApiResponseHandle* response_handle, int64_t return_value) { + g_autoptr(CameraLinuxCameraApiCreateResponse) response = 
camera_linux_camera_api_create_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "create", error->message); + } +} + +void camera_linux_camera_api_respond_error_create(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiCreateResponse) response = camera_linux_camera_api_create_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "create", error->message); + } +} + +void camera_linux_camera_api_respond_initialize(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiInitializeResponse) response = camera_linux_camera_api_initialize_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "initialize", error->message); + } +} + +void camera_linux_camera_api_respond_error_initialize(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiInitializeResponse) response = camera_linux_camera_api_initialize_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "initialize", error->message); + } +} + +void 
camera_linux_camera_api_respond_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, int64_t* return_value) { + g_autoptr(CameraLinuxCameraApiGetTextureIdResponse) response = camera_linux_camera_api_get_texture_id_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getTextureId", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetTextureIdResponse) response = camera_linux_camera_api_get_texture_id_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getTextureId", error->message); + } +} + +void camera_linux_camera_api_respond_dispose(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiDisposeResponse) response = camera_linux_camera_api_dispose_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "dispose", error->message); + } +} + +void camera_linux_camera_api_respond_error_dispose(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiDisposeResponse) response = camera_linux_camera_api_dispose_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "dispose", error->message); + } +} + +void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiTakePictureResponse) response = camera_linux_camera_api_take_picture_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "takePicture", error->message); + } +} + +void camera_linux_camera_api_respond_error_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiTakePictureResponse) response = camera_linux_camera_api_take_picture_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "takePicture", error->message); + } +} + +void camera_linux_camera_api_respond_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiStartVideoRecordingResponse) response = camera_linux_camera_api_start_video_recording_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, 
FlValue* details) { + g_autoptr(CameraLinuxCameraApiStartVideoRecordingResponse) response = camera_linux_camera_api_start_video_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value) { + g_autoptr(CameraLinuxCameraApiStopVideoRecordingResponse) response = camera_linux_camera_api_stop_video_recording_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiStopVideoRecordingResponse) response = camera_linux_camera_api_stop_video_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetExposureModeResponse) response = camera_linux_camera_api_set_exposure_mode_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, 
response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposureMode", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetExposureModeResponse) response = camera_linux_camera_api_set_exposure_mode_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposureMode", error->message); + } +} + +void camera_linux_camera_api_respond_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetFocusModeResponse) response = camera_linux_camera_api_set_focus_mode_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusMode", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetFocusModeResponse) response = camera_linux_camera_api_set_focus_mode_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusMode", error->message); + } +} + +void camera_linux_camera_api_respond_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle) { + 
g_autoptr(CameraLinuxCameraApiSetImageFormatGroupResponse) response = camera_linux_camera_api_set_image_format_group_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFormatGroup", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetImageFormatGroupResponse) response = camera_linux_camera_api_set_image_format_group_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFormatGroup", error->message); + } +} + +struct _CameraLinuxCameraEventApi { + GObject parent_instance; + + FlBinaryMessenger* messenger; + gchar *suffix; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApi, camera_linux_camera_event_api, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_dispose(GObject* object) { + CameraLinuxCameraEventApi* self = CAMERA_LINUX_CAMERA_EVENT_API(object); + g_clear_object(&self->messenger); + g_clear_pointer(&self->suffix, g_free); + G_OBJECT_CLASS(camera_linux_camera_event_api_parent_class)->dispose(object); +} + +static void camera_linux_camera_event_api_init(CameraLinuxCameraEventApi* self) { +} + +static void camera_linux_camera_event_api_class_init(CameraLinuxCameraEventApiClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_dispose; +} + +CameraLinuxCameraEventApi* camera_linux_camera_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix) { + CameraLinuxCameraEventApi* self = 
CAMERA_LINUX_CAMERA_EVENT_API(g_object_new(camera_linux_camera_event_api_get_type(), nullptr)); + self->messenger = FL_BINARY_MESSENGER(g_object_ref(messenger)); + self->suffix = suffix != nullptr ? g_strdup_printf(".%s", suffix) : g_strdup(""); + return self; +} + +struct _CameraLinuxCameraEventApiInitializedResponse { + GObject parent_instance; + + FlValue* error; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApiInitializedResponse, camera_linux_camera_event_api_initialized_response, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_initialized_response_dispose(GObject* object) { + CameraLinuxCameraEventApiInitializedResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_INITIALIZED_RESPONSE(object); + g_clear_pointer(&self->error, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_event_api_initialized_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_event_api_initialized_response_init(CameraLinuxCameraEventApiInitializedResponse* self) { +} + +static void camera_linux_camera_event_api_initialized_response_class_init(CameraLinuxCameraEventApiInitializedResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_initialized_response_dispose; +} + +static CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_initialized_response_new(FlValue* response) { + CameraLinuxCameraEventApiInitializedResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_INITIALIZED_RESPONSE(g_object_new(camera_linux_camera_event_api_initialized_response_get_type(), nullptr)); + if (fl_value_get_length(response) > 1) { + self->error = fl_value_ref(response); + } + return self; +} + +gboolean camera_linux_camera_event_api_initialized_response_is_error(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), FALSE); + return self->error != nullptr; +} + +const gchar* 
camera_linux_camera_event_api_initialized_response_get_error_code(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_initialized_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 0)); +} + +const gchar* camera_linux_camera_event_api_initialized_response_get_error_message(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_initialized_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 1)); +} + +FlValue* camera_linux_camera_event_api_initialized_response_get_error_details(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_initialized_response_is_error(self)); + return fl_value_get_list_value(self->error, 2); +} + +static void camera_linux_camera_event_api_initialized_cb(GObject* object, GAsyncResult* result, gpointer user_data) { + GTask* task = G_TASK(user_data); + g_task_return_pointer(task, result, g_object_unref); +} + +void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* self, CameraLinuxPlatformCameraState* initial_state, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { + g_autoptr(FlValue) args = fl_value_new_list(); + fl_value_append_take(args, fl_value_new_custom_object(136, G_OBJECT(initial_state))); + g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.initialized%s", self->suffix); + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, 
FL_MESSAGE_CODEC(codec)); + GTask* task = g_task_new(self, cancellable, callback, user_data); + g_task_set_task_data(task, channel, g_object_unref); + fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_event_api_initialized_cb, task); +} + +CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_initialized_finish(CameraLinuxCameraEventApi* self, GAsyncResult* result, GError** error) { + g_autoptr(GTask) task = G_TASK(result); + GAsyncResult* r = G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); + FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); + g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); + if (response == nullptr) { + return nullptr; + } + return camera_linux_camera_event_api_initialized_response_new(response); +} + +struct _CameraLinuxCameraEventApiTextureIdResponse { + GObject parent_instance; + + FlValue* error; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApiTextureIdResponse, camera_linux_camera_event_api_texture_id_response, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_texture_id_response_dispose(GObject* object) { + CameraLinuxCameraEventApiTextureIdResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(object); + g_clear_pointer(&self->error, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_event_api_texture_id_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_event_api_texture_id_response_init(CameraLinuxCameraEventApiTextureIdResponse* self) { +} + +static void camera_linux_camera_event_api_texture_id_response_class_init(CameraLinuxCameraEventApiTextureIdResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_texture_id_response_dispose; +} + +static CameraLinuxCameraEventApiTextureIdResponse* camera_linux_camera_event_api_texture_id_response_new(FlValue* response) { + CameraLinuxCameraEventApiTextureIdResponse* self = 
CAMERA_LINUX_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(g_object_new(camera_linux_camera_event_api_texture_id_response_get_type(), nullptr)); + if (fl_value_get_length(response) > 1) { + self->error = fl_value_ref(response); + } + return self; +} + +gboolean camera_linux_camera_event_api_texture_id_response_is_error(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), FALSE); + return self->error != nullptr; +} + +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_code(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_texture_id_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 0)); +} + +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_message(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_texture_id_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 1)); +} + +FlValue* camera_linux_camera_event_api_texture_id_response_get_error_details(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_texture_id_response_is_error(self)); + return fl_value_get_list_value(self->error, 2); +} + +static void camera_linux_camera_event_api_texture_id_cb(GObject* object, GAsyncResult* result, gpointer user_data) { + GTask* task = G_TASK(user_data); + g_task_return_pointer(task, result, g_object_unref); +} + +void camera_linux_camera_event_api_texture_id(CameraLinuxCameraEventApi* self, int64_t texture_id, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { 
+ g_autoptr(FlValue) args = fl_value_new_list(); + fl_value_append_take(args, fl_value_new_int(texture_id)); + g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.textureId%s", self->suffix); + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); + GTask* task = g_task_new(self, cancellable, callback, user_data); + g_task_set_task_data(task, channel, g_object_unref); + fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_event_api_texture_id_cb, task); +} + +CameraLinuxCameraEventApiTextureIdResponse* camera_linux_camera_event_api_texture_id_finish(CameraLinuxCameraEventApi* self, GAsyncResult* result, GError** error) { + g_autoptr(GTask) task = G_TASK(result); + GAsyncResult* r = G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); + FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); + g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); + if (response == nullptr) { + return nullptr; + } + return camera_linux_camera_event_api_texture_id_response_new(response); +} + +struct _CameraLinuxCameraEventApiErrorResponse { + GObject parent_instance; + + FlValue* error; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApiErrorResponse, camera_linux_camera_event_api_error_response, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_error_response_dispose(GObject* object) { + CameraLinuxCameraEventApiErrorResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_ERROR_RESPONSE(object); + g_clear_pointer(&self->error, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_event_api_error_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_event_api_error_response_init(CameraLinuxCameraEventApiErrorResponse* self) { +} + +static void 
camera_linux_camera_event_api_error_response_class_init(CameraLinuxCameraEventApiErrorResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_error_response_dispose; +} + +static CameraLinuxCameraEventApiErrorResponse* camera_linux_camera_event_api_error_response_new(FlValue* response) { + CameraLinuxCameraEventApiErrorResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_ERROR_RESPONSE(g_object_new(camera_linux_camera_event_api_error_response_get_type(), nullptr)); + if (fl_value_get_length(response) > 1) { + self->error = fl_value_ref(response); + } + return self; +} + +gboolean camera_linux_camera_event_api_error_response_is_error(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), FALSE); + return self->error != nullptr; +} + +const gchar* camera_linux_camera_event_api_error_response_get_error_code(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_error_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 0)); +} + +const gchar* camera_linux_camera_event_api_error_response_get_error_message(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_error_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 1)); +} + +FlValue* camera_linux_camera_event_api_error_response_get_error_details(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_error_response_is_error(self)); + return fl_value_get_list_value(self->error, 2); +} + +static void camera_linux_camera_event_api_error_cb(GObject* object, GAsyncResult* result, gpointer 
user_data) { + GTask* task = G_TASK(user_data); + g_task_return_pointer(task, result, g_object_unref); +} + +void camera_linux_camera_event_api_error(CameraLinuxCameraEventApi* self, const gchar* message, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { + g_autoptr(FlValue) args = fl_value_new_list(); + fl_value_append_take(args, fl_value_new_string(message)); + g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.error%s", self->suffix); + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); + GTask* task = g_task_new(self, cancellable, callback, user_data); + g_task_set_task_data(task, channel, g_object_unref); + fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_event_api_error_cb, task); +} + +CameraLinuxCameraEventApiErrorResponse* camera_linux_camera_event_api_error_finish(CameraLinuxCameraEventApi* self, GAsyncResult* result, GError** error) { + g_autoptr(GTask) task = G_TASK(result); + GAsyncResult* r = G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); + FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); + g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); + if (response == nullptr) { + return nullptr; + } + return camera_linux_camera_event_api_error_response_new(response); +} diff --git a/packages/camera/camera_linux/linux/messages.g.h b/packages/camera/camera_linux/linux/messages.g.h new file mode 100644 index 000000000000..e84a5660ef81 --- /dev/null +++ b/packages/camera/camera_linux/linux/messages.g.h @@ -0,0 +1,727 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+// Autogenerated from Pigeon (v22.7.4), do not edit directly. +// See also: https://pub.dev/packages/pigeon + +#ifndef PIGEON_MESSAGES_G_H_ +#define PIGEON_MESSAGES_G_H_ + +#include <flutter_linux/flutter_linux.h> + +G_BEGIN_DECLS + +/** + * CameraLinuxPlatformDeviceOrientation: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_UP: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_LEFT: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_DOWN: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_RIGHT: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_UP = 0, + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_LEFT = 1, + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_DOWN = 2, + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_RIGHT = 3 +} CameraLinuxPlatformDeviceOrientation; + +/** + * CameraLinuxPlatformExposureMode: + * CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO: + * CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO = 0, + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED = 1 +} CameraLinuxPlatformExposureMode; + +/** + * CameraLinuxPlatformFlashMode: + * CAMERA_LINUX_PLATFORM_FLASH_MODE_OFF: + * CAMERA_LINUX_PLATFORM_FLASH_MODE_AUTO: + * CAMERA_LINUX_PLATFORM_FLASH_MODE_ALWAYS: + * CAMERA_LINUX_PLATFORM_FLASH_MODE_TORCH: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_FLASH_MODE_OFF = 0, + CAMERA_LINUX_PLATFORM_FLASH_MODE_AUTO = 1, + CAMERA_LINUX_PLATFORM_FLASH_MODE_ALWAYS = 2, + CAMERA_LINUX_PLATFORM_FLASH_MODE_TORCH = 3 +} CameraLinuxPlatformFlashMode; + +/** + * CameraLinuxPlatformFocusMode: + * CAMERA_LINUX_PLATFORM_FOCUS_MODE_AUTO: + * CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_FOCUS_MODE_AUTO = 0, + CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED = 1 +} CameraLinuxPlatformFocusMode; + +/** + * CameraLinuxPlatformImageFormatGroup: + * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8: + * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8: + * + */ 
+typedef enum { + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8 = 0, + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8 = 1 +} CameraLinuxPlatformImageFormatGroup; + +/** + * CameraLinuxPlatformResolutionPreset: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW = 0, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM = 1, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH = 2, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH = 3, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH = 4, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX = 5 +} CameraLinuxPlatformResolutionPreset; + +/** + * CameraLinuxPlatformSize: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformSize, camera_linux_platform_size, CAMERA_LINUX, PLATFORM_SIZE, GObject) + +/** + * camera_linux_platform_size_new: + * width: field in this object. + * height: field in this object. + * + * Creates a new #PlatformSize object. + * + * Returns: a new #CameraLinuxPlatformSize + */ +CameraLinuxPlatformSize* camera_linux_platform_size_new(double width, double height); + +/** + * camera_linux_platform_size_get_width + * @object: a #CameraLinuxPlatformSize. + * + * Gets the value of the width field of @object. + * + * Returns: the field value. + */ +double camera_linux_platform_size_get_width(CameraLinuxPlatformSize* object); + +/** + * camera_linux_platform_size_get_height + * @object: a #CameraLinuxPlatformSize. + * + * Gets the value of the height field of @object. + * + * Returns: the field value. 
+ */ +double camera_linux_platform_size_get_height(CameraLinuxPlatformSize* object); + +/** + * CameraLinuxPlatformCameraState: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformCameraState, camera_linux_platform_camera_state, CAMERA_LINUX, PLATFORM_CAMERA_STATE, GObject) + +/** + * camera_linux_platform_camera_state_new: + * preview_size: field in this object. + * exposure_mode: field in this object. + * focus_mode: field in this object. + * exposure_point_supported: field in this object. + * focus_point_supported: field in this object. + * + * Creates a new #PlatformCameraState object. + * + * Returns: a new #CameraLinuxPlatformCameraState + */ +CameraLinuxPlatformCameraState* camera_linux_platform_camera_state_new(CameraLinuxPlatformSize* preview_size, CameraLinuxPlatformExposureMode exposure_mode, CameraLinuxPlatformFocusMode focus_mode, gboolean exposure_point_supported, gboolean focus_point_supported); + +/** + * camera_linux_platform_camera_state_get_preview_size + * @object: a #CameraLinuxPlatformCameraState. + * + * The size of the preview, in pixels. + * + * Returns: the field value. + */ +CameraLinuxPlatformSize* camera_linux_platform_camera_state_get_preview_size(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_exposure_mode + * @object: a #CameraLinuxPlatformCameraState. + * + * The default exposure mode + * + * Returns: the field value. + */ +CameraLinuxPlatformExposureMode camera_linux_platform_camera_state_get_exposure_mode(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_focus_mode + * @object: a #CameraLinuxPlatformCameraState. + * + * The default focus mode + * + * Returns: the field value. + */ +CameraLinuxPlatformFocusMode camera_linux_platform_camera_state_get_focus_mode(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_exposure_point_supported + * @object: a #CameraLinuxPlatformCameraState. 
+ * + * Whether setting exposure points is supported. + * + * Returns: the field value. + */ +gboolean camera_linux_platform_camera_state_get_exposure_point_supported(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_focus_point_supported + * @object: a #CameraLinuxPlatformCameraState. + * + * Whether setting focus points is supported. + * + * Returns: the field value. + */ +gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinuxPlatformCameraState* object); + +/** + * CameraLinuxPlatformPoint: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformPoint, camera_linux_platform_point, CAMERA_LINUX, PLATFORM_POINT, GObject) + +/** + * camera_linux_platform_point_new: + * x: field in this object. + * y: field in this object. + * + * Creates a new #PlatformPoint object. + * + * Returns: a new #CameraLinuxPlatformPoint + */ +CameraLinuxPlatformPoint* camera_linux_platform_point_new(double x, double y); + +/** + * camera_linux_platform_point_get_x + * @object: a #CameraLinuxPlatformPoint. + * + * Gets the value of the x field of @object. + * + * Returns: the field value. + */ +double camera_linux_platform_point_get_x(CameraLinuxPlatformPoint* object); + +/** + * camera_linux_platform_point_get_y + * @object: a #CameraLinuxPlatformPoint. + * + * Gets the value of the y field of @object. + * + * Returns: the field value. + */ +double camera_linux_platform_point_get_y(CameraLinuxPlatformPoint* object); + +G_DECLARE_FINAL_TYPE(CameraLinuxMessageCodec, camera_linux_message_codec, CAMERA_LINUX, MESSAGE_CODEC, FlStandardMessageCodec) + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApi, camera_linux_camera_api, CAMERA_LINUX, CAMERA_API, GObject) + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResponseHandle, camera_linux_camera_api_response_handle, CAMERA_LINUX, CAMERA_API_RESPONSE_HANDLE, GObject) + +/** + * CameraLinuxCameraApiVTable: + * + * Table of functions exposed by CameraApi to be implemented by the API provider. 
+ */ +typedef struct { + void (*get_available_cameras_names)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*create)(const gchar* camera_name, CameraLinuxPlatformResolutionPreset resolution_preset, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*initialize)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*get_texture_id)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*dispose)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*take_picture)(int64_t camera_id, const gchar* path, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*start_video_recording)(int64_t camera_id, const gchar* path, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*stop_video_recording)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_exposure_mode)(int64_t camera_id, CameraLinuxPlatformExposureMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_focus_mode)(int64_t camera_id, CameraLinuxPlatformFocusMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_image_format_group)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format_group, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); +} CameraLinuxCameraApiVTable; + +/** + * camera_linux_camera_api_set_method_handlers: + * + * @messenger: an #FlBinaryMessenger. + * @suffix: (allow-none): a suffix to add to the API or %NULL for none. + * @vtable: implementations of the methods in this API. + * @user_data: (closure): user data to pass to the functions in @vtable. 
+ * @user_data_free_func: (allow-none): a function which gets called to free @user_data, or %NULL. + * + * Connects the method handlers in the CameraApi API. + */ +void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix, const CameraLinuxCameraApiVTable* vtable, gpointer user_data, GDestroyNotify user_data_free_func); + +/** + * camera_linux_camera_api_clear_method_handlers: + * + * @messenger: an #FlBinaryMessenger. + * @suffix: (allow-none): a suffix to add to the API or %NULL for none. + * + * Clears the method handlers in the CameraApi API. + */ +void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix); + +/** + * camera_linux_camera_api_respond_get_available_cameras_names: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getAvailableCamerasNames. + */ +void camera_linux_camera_api_respond_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value); + +/** + * camera_linux_camera_api_respond_error_get_available_cameras_names: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getAvailableCamerasNames. + */ +void camera_linux_camera_api_respond_error_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_create: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.create. 
+ */ +void camera_linux_camera_api_respond_create(CameraLinuxCameraApiResponseHandle* response_handle, int64_t return_value); + +/** + * camera_linux_camera_api_respond_error_create: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.create. + */ +void camera_linux_camera_api_respond_error_create(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_initialize: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.initialize. + */ +void camera_linux_camera_api_respond_initialize(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_initialize: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.initialize. + */ +void camera_linux_camera_api_respond_error_initialize(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_get_texture_id: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getTextureId. + */ +void camera_linux_camera_api_respond_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, int64_t* return_value); + +/** + * camera_linux_camera_api_respond_error_get_texture_id: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getTextureId. 
+ */ +void camera_linux_camera_api_respond_error_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_dispose: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.dispose. + */ +void camera_linux_camera_api_respond_dispose(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_dispose: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.dispose. + */ +void camera_linux_camera_api_respond_error_dispose(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_take_picture: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.takePicture. + */ +void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_take_picture: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.takePicture. + */ +void camera_linux_camera_api_respond_error_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_start_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.startVideoRecording. 
+ */ +void camera_linux_camera_api_respond_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_start_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.startVideoRecording. + */ +void camera_linux_camera_api_respond_error_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_stop_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.stopVideoRecording. + */ +void camera_linux_camera_api_respond_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value); + +/** + * camera_linux_camera_api_respond_error_stop_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.stopVideoRecording. + */ +void camera_linux_camera_api_respond_error_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_exposure_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setExposureMode. + */ +void camera_linux_camera_api_respond_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_exposure_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. 
+ * + * Responds with an error to CameraApi.setExposureMode. + */ +void camera_linux_camera_api_respond_error_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_focus_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setFocusMode. + */ +void camera_linux_camera_api_respond_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_focus_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setFocusMode. + */ +void camera_linux_camera_api_respond_error_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_image_format_group: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setImageFormatGroup. + */ +void camera_linux_camera_api_respond_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_image_format_group: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setImageFormatGroup. 
+ */ +void camera_linux_camera_api_respond_error_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiInitializedResponse, camera_linux_camera_event_api_initialized_response, CAMERA_LINUX, CAMERA_EVENT_API_INITIALIZED_RESPONSE, GObject) + +/** + * camera_linux_camera_event_api_initialized_response_is_error: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Checks if a response to CameraEventApi.initialized is an error. + * + * Returns: a %TRUE if this response is an error. + */ +gboolean camera_linux_camera_event_api_initialized_response_is_error(CameraLinuxCameraEventApiInitializedResponse* response); + +/** + * camera_linux_camera_event_api_initialized_response_get_error_code: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Get the error code for this response. + * + * Returns: an error code or %NULL if not an error. + */ +const gchar* camera_linux_camera_event_api_initialized_response_get_error_code(CameraLinuxCameraEventApiInitializedResponse* response); + +/** + * camera_linux_camera_event_api_initialized_response_get_error_message: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Get the error message for this response. + * + * Returns: an error message. + */ +const gchar* camera_linux_camera_event_api_initialized_response_get_error_message(CameraLinuxCameraEventApiInitializedResponse* response); + +/** + * camera_linux_camera_event_api_initialized_response_get_error_details: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Get the error details for this response. + * + * Returns: (allow-none): an error details or %NULL. 
+ */ +FlValue* camera_linux_camera_event_api_initialized_response_get_error_details(CameraLinuxCameraEventApiInitializedResponse* response); + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiTextureIdResponse, camera_linux_camera_event_api_texture_id_response, CAMERA_LINUX, CAMERA_EVENT_API_TEXTURE_ID_RESPONSE, GObject) + +/** + * camera_linux_camera_event_api_texture_id_response_is_error: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Checks if a response to CameraEventApi.textureId is an error. + * + * Returns: a %TRUE if this response is an error. + */ +gboolean camera_linux_camera_event_api_texture_id_response_is_error(CameraLinuxCameraEventApiTextureIdResponse* response); + +/** + * camera_linux_camera_event_api_texture_id_response_get_error_code: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Get the error code for this response. + * + * Returns: an error code or %NULL if not an error. + */ +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_code(CameraLinuxCameraEventApiTextureIdResponse* response); + +/** + * camera_linux_camera_event_api_texture_id_response_get_error_message: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Get the error message for this response. + * + * Returns: an error message. + */ +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_message(CameraLinuxCameraEventApiTextureIdResponse* response); + +/** + * camera_linux_camera_event_api_texture_id_response_get_error_details: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Get the error details for this response. + * + * Returns: (allow-none): an error details or %NULL. 
+ */ +FlValue* camera_linux_camera_event_api_texture_id_response_get_error_details(CameraLinuxCameraEventApiTextureIdResponse* response); + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiErrorResponse, camera_linux_camera_event_api_error_response, CAMERA_LINUX, CAMERA_EVENT_API_ERROR_RESPONSE, GObject) + +/** + * camera_linux_camera_event_api_error_response_is_error: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Checks if a response to CameraEventApi.error is an error. + * + * Returns: a %TRUE if this response is an error. + */ +gboolean camera_linux_camera_event_api_error_response_is_error(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * camera_linux_camera_event_api_error_response_get_error_code: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Get the error code for this response. + * + * Returns: an error code or %NULL if not an error. + */ +const gchar* camera_linux_camera_event_api_error_response_get_error_code(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * camera_linux_camera_event_api_error_response_get_error_message: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Get the error message for this response. + * + * Returns: an error message. + */ +const gchar* camera_linux_camera_event_api_error_response_get_error_message(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * camera_linux_camera_event_api_error_response_get_error_details: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Get the error details for this response. + * + * Returns: (allow-none): an error details or %NULL. + */ +FlValue* camera_linux_camera_event_api_error_response_get_error_details(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * CameraLinuxCameraEventApi: + * + * Handler for native callbacks that are tied to a specific camera ID. + * + * This is intended to be initialized with the camera ID as a suffix. 
+ */
+
+G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApi, camera_linux_camera_event_api, CAMERA_LINUX, CAMERA_EVENT_API, GObject)
+
+/**
+ * camera_linux_camera_event_api_new:
+ * @messenger: an #FlBinaryMessenger.
+ * @suffix: (allow-none): a suffix to add to the API or %NULL for none.
+ *
+ * Creates a new object to access the CameraEventApi API.
+ *
+ * Returns: a new #CameraLinuxCameraEventApi
+ */
+CameraLinuxCameraEventApi* camera_linux_camera_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix);
+
+/**
+ * camera_linux_camera_event_api_initialized:
+ * @api: a #CameraLinuxCameraEventApi.
+ * @initial_state: parameter for this method.
+ * @cancellable: (allow-none): a #GCancellable or %NULL.
+ * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response.
+ * @user_data: (closure): user data to pass to @callback.
+ *
+ * Called when the camera is initialized for use.
+ */
+void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* api, CameraLinuxPlatformCameraState* initial_state, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data);
+
+/**
+ * camera_linux_camera_event_api_initialized_finish:
+ * @api: a #CameraLinuxCameraEventApi.
+ * @result: a #GAsyncResult.
+ * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore.
+ *
+ * Completes a camera_linux_camera_event_api_initialized() call.
+ *
+ * Returns: a #CameraLinuxCameraEventApiInitializedResponse or %NULL on error.
+ */
+CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_initialized_finish(CameraLinuxCameraEventApi* api, GAsyncResult* result, GError** error);
+
+/**
+ * camera_linux_camera_event_api_texture_id:
+ * @api: a #CameraLinuxCameraEventApi.
+ * @texture_id: parameter for this method.
+ * @cancellable: (allow-none): a #GCancellable or %NULL.
+ * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response. + * @user_data: (closure): user data to pass to @callback. + * + */ +void camera_linux_camera_event_api_texture_id(CameraLinuxCameraEventApi* api, int64_t texture_id, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data); + +/** + * camera_linux_camera_event_api_texture_id_finish: + * @api: a #CameraLinuxCameraEventApi. + * @result: a #GAsyncResult. + * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore. + * + * Completes a camera_linux_camera_event_api_texture_id() call. + * + * Returns: a #CameraLinuxCameraEventApiTextureIdResponse or %NULL on error. + */ +CameraLinuxCameraEventApiTextureIdResponse* camera_linux_camera_event_api_texture_id_finish(CameraLinuxCameraEventApi* api, GAsyncResult* result, GError** error); + +/** + * camera_linux_camera_event_api_error: + * @api: a #CameraLinuxCameraEventApi. + * @message: parameter for this method. + * @cancellable: (allow-none): a #GCancellable or %NULL. + * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response. + * @user_data: (closure): user data to pass to @callback. + * + * Called when an error occurs in the camera. + * + * This should be used for errors that occur outside of the context of + * handling a specific HostApi call, such as during streaming. + */ +void camera_linux_camera_event_api_error(CameraLinuxCameraEventApi* api, const gchar* message, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data); + +/** + * camera_linux_camera_event_api_error_finish: + * @api: a #CameraLinuxCameraEventApi. + * @result: a #GAsyncResult. + * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore. + * + * Completes a camera_linux_camera_event_api_error() call. 
+ * + * Returns: a #CameraLinuxCameraEventApiErrorResponse or %NULL on error. + */ +CameraLinuxCameraEventApiErrorResponse* camera_linux_camera_event_api_error_finish(CameraLinuxCameraEventApi* api, GAsyncResult* result, GError** error); + +G_END_DECLS + +#endif // PIGEON_MESSAGES_G_H_ diff --git a/packages/camera/camera_linux/pigeons/copyright.txt b/packages/camera/camera_linux/pigeons/copyright.txt new file mode 100644 index 000000000000..1236b63caf3a --- /dev/null +++ b/packages/camera/camera_linux/pigeons/copyright.txt @@ -0,0 +1,3 @@ +Copyright 2013 The Flutter Authors. All rights reserved. +Use of this source code is governed by a BSD-style license that can be +found in the LICENSE file. diff --git a/packages/camera/camera_linux/pigeons/messages.dart b/packages/camera/camera_linux/pigeons/messages.dart new file mode 100644 index 000000000000..6f1ee783d31b --- /dev/null +++ b/packages/camera/camera_linux/pigeons/messages.dart @@ -0,0 +1,166 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import 'package:pigeon/pigeon.dart'; + +@ConfigurePigeon(PigeonOptions( + dartOut: 'lib/src/messages.g.dart', + gobjectHeaderOut: 'linux/messages.g.h', + gobjectSourceOut: 'linux/messages.g.cc', + gobjectOptions: GObjectOptions(), + copyrightHeader: 'pigeons/copyright.txt', +)) + +// Pigeon equivalent of CGSize. +class PlatformSize { + PlatformSize({required this.width, required this.height}); + + final double width; + final double height; +} + +// Pigeon version of DeviceOrientation. +enum PlatformDeviceOrientation { + portraitUp, + landscapeLeft, + portraitDown, + landscapeRight, +} + +// Pigeon version of ExposureMode. +enum PlatformExposureMode { + auto, + locked, +} + +// Pigeon version of FlashMode. +enum PlatformFlashMode { + off, + auto, + always, + torch, +} + +// Pigeon version of FocusMode. 
+enum PlatformFocusMode {
+  auto,
+  locked,
+}
+
+// Pigeon version of the subset of ImageFormatGroup supported by this plugin.
+enum PlatformImageFormatGroup {
+  rgb8,
+  mono8,
+}
+
+enum PlatformResolutionPreset {
+  low, // 352x288 on iOS, ~240p on Android and Web
+  medium, // ~480p
+  high, // ~720p
+  veryHigh, // ~1080p
+  ultraHigh, // ~2160p
+  max, // The highest resolution available.
+}
+
+// Pigeon version of the data needed for a CameraInitializedEvent.
+class PlatformCameraState {
+  PlatformCameraState({
+    required this.previewSize,
+    required this.exposureMode,
+    required this.focusMode,
+    required this.exposurePointSupported,
+    required this.focusPointSupported,
+  });
+
+  /// The size of the preview, in pixels.
+  final PlatformSize previewSize;
+
+  /// The default exposure mode
+  final PlatformExposureMode exposureMode;
+
+  /// The default focus mode
+  final PlatformFocusMode focusMode;
+
+  /// Whether setting exposure points is supported.
+  final bool exposurePointSupported;
+
+  /// Whether setting focus points is supported.
+  final bool focusPointSupported;
+}
+
+// Pigeon equivalent of CGPoint.
+class PlatformPoint {
+  PlatformPoint({required this.x, required this.y});
+
+  final double x;
+  final double y;
+}
+
+@HostApi()
+abstract class CameraApi {
+  /// Returns the list of available cameras.
+  @async
+  List getAvailableCamerasNames();
+
+  /// Creates a new camera with the given settings, and returns its ID.
+  @async
+  int create(String cameraName, PlatformResolutionPreset resolutionPreset);
+
+  /// Initializes the camera with the given ID.
+  @async
+  void initialize(int cameraId, PlatformImageFormatGroup imageFormat);
+
+  /// Get the texture ID for the camera with the given ID.
+  @async
+  int? getTextureId(int cameraId);
+
+  /// Indicates that the given camera is no longer being used on the Dart side,
+  /// and any associated resources can be cleaned up.
+  @async
+  void dispose(int cameraId);
+
+  /// Takes a picture with the current settings, saving it to the given file
+  /// path.
+  @async
+  void takePicture(int cameraId, String path);
+
+  /// Begins recording video, optionally enabling streaming to Dart at the same
+  /// time.
+  @async
+  void startVideoRecording(int cameraId, String path);
+
+  /// Stops recording video, and returns the path to the resulting file.
+  @async
+  String stopVideoRecording(int cameraId);
+
+  /// Switches the camera to the given exposure mode.
+  @async
+  void setExposureMode(int cameraId, PlatformExposureMode mode);
+
+  /// Switches the camera to the given focus mode.
+  @async
+  void setFocusMode(int cameraId, PlatformFocusMode mode);
+
+  // Sets the ImageFormatGroup.
+  @async
+  void setImageFormatGroup(
+      int cameraId, PlatformImageFormatGroup imageFormatGroup);
+}
+
+/// Handler for native callbacks that are tied to a specific camera ID.
+///
+/// This is intended to be initialized with the camera ID as a suffix.
+@FlutterApi()
+abstract class CameraEventApi {
+  /// Called when the camera is initialized for use.
+  void initialized(PlatformCameraState initialState);
+
+  void textureId(int textureId);
+
+  /// Called when an error occurs in the camera.
+  ///
+  /// This should be used for errors that occur outside of the context of
+  /// handling a specific HostApi call, such as during streaming.
+  void error(String message);
+}
diff --git a/packages/camera/camera_linux/pubspec.yaml b/packages/camera/camera_linux/pubspec.yaml
new file mode 100644
index 000000000000..d3fdd73d210c
--- /dev/null
+++ b/packages/camera/camera_linux/pubspec.yaml
@@ -0,0 +1,76 @@
+name: camera_linux
+description: "Linux implementation of the camera plugin."
+version: 0.0.1 + +environment: + sdk: ^3.5.4 + flutter: ">=3.3.0" + +dependencies: + flutter: + sdk: flutter + plugin_platform_interface: ^2.0.2 + camera_platform_interface: ^2.7.0 + stream_transform: ^2.1.1 + path_provider: ^2.1.5 + +dev_dependencies: + flutter_test: + sdk: flutter + flutter_lints: ^4.0.0 + pigeon: ^22.4.2 + +# For information on the generic Dart part of this file, see the +# following page: https://dart.dev/tools/pub/pubspec + +# The following section is specific to Flutter packages. +flutter: + # This section identifies this Flutter project as a plugin project. + # The 'pluginClass' specifies the class (in Java, Kotlin, Swift, Objective-C, etc.) + # which should be registered in the plugin registry. This is required for + # using method channels. + # The Android 'package' specifies package in which the registered class is. + # This is required for using method channels on Android. + # The 'ffiPlugin' specifies that native code should be built and bundled. + # This is required for using `dart:ffi`. + # All these are used by the tooling to maintain consistency when + # adding or updating assets for this project. + plugin: + platforms: + linux: + pluginClass: CameraPlugin + dartPluginClass: CameraLinux + + assets: + - deps/pylon-8.0.2.16314_linux-aarch64_setup.tar.gz + + # To add assets to your plugin package, add an assets section, like this: + # assets: + # - images/a_dot_burr.jpeg + # - images/a_dot_ham.jpeg + # + # For details regarding assets in packages, see + # https://flutter.dev/to/asset-from-package + # + # An image asset can refer to one or more resolution-specific "variants", see + # https://flutter.dev/to/resolution-aware-images + + # To add custom fonts to your plugin package, add a fonts section here, + # in this "flutter" section. Each entry in this list should have a + # "family" key with the font family name, and a "fonts" key with a + # list giving the asset and other descriptors for the font. 
For + # example: + # fonts: + # - family: Schyler + # fonts: + # - asset: fonts/Schyler-Regular.ttf + # - asset: fonts/Schyler-Italic.ttf + # style: italic + # - family: Trajan Pro + # fonts: + # - asset: fonts/TrajanPro.ttf + # - asset: fonts/TrajanPro_Bold.ttf + # weight: 700 + # + # For details regarding fonts in packages, see + # https://flutter.dev/to/font-from-package