From 41c5fcd626aeddfce26742c74ce09df0e544d87b Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Thu, 17 Oct 2024 22:38:25 -0400 Subject: [PATCH 01/21] Remove unnecessary files and update camera_avfoundation package --- .gitignore | 2 + flutter_packages.code-workspace | 16 +++ .../camera_avfoundation/example/lib/main.dart | 105 +++++++++--------- .../lib/src/avfoundation_camera.dart | 26 +++-- .../lib/src/messages.g.dart | 34 +++++- .../camera_avfoundation/pigeons/messages.dart | 21 +++- 6 files changed, 129 insertions(+), 75 deletions(-) create mode 100644 flutter_packages.code-workspace diff --git a/.gitignore b/.gitignore index c9dae8e685c2..430247c396c5 100644 --- a/.gitignore +++ b/.gitignore @@ -58,3 +58,5 @@ gradlew.bat .project .classpath .settings +packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/xcshareddata/swiftpm/Package.resolved +packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved diff --git a/flutter_packages.code-workspace b/flutter_packages.code-workspace new file mode 100644 index 000000000000..7c4259ae73f0 --- /dev/null +++ b/flutter_packages.code-workspace @@ -0,0 +1,16 @@ +{ + "folders": [ + { + "path": "." 
+ }, + { + "path": "./packages/camera" + }, + { + "path": "./packages/camera/camera_avfoundation" + } + ], + "settings": { + "editor.quickSuggestions": false + } +} diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart index f9576b96398f..2f9bc238bd4c 100644 --- a/packages/camera/camera_avfoundation/example/lib/main.dart +++ b/packages/camera/camera_avfoundation/example/lib/main.dart @@ -6,6 +6,7 @@ import 'dart:async'; import 'dart:io'; import 'dart:math'; +import 'package:camera_avfoundation/camera_avfoundation.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; @@ -47,8 +48,7 @@ void _logError(String code, String? message) { print('Error: $code${message == null ? '' : '\nError Message: $message'}'); } -class _CameraExampleHomeState extends State - with WidgetsBindingObserver, TickerProviderStateMixin { +class _CameraExampleHomeState extends State with WidgetsBindingObserver, TickerProviderStateMixin { CameraController? controller; XFile? imageFile; XFile? 
videoFile; @@ -68,6 +68,7 @@ class _CameraExampleHomeState extends State double _maxAvailableZoom = 1.0; double _currentScale = 1.0; double _baseScale = 1.0; + double _lenPosition = 0.0; // Counting pointers (number of user fingers on screen) int _pointers = 0; @@ -151,6 +152,7 @@ class _CameraExampleHomeState extends State ), ), ), + _buildLensPositionSettings(), _captureControlRowWidget(), _modeControlRowWidget(), Padding( @@ -164,6 +166,37 @@ class _CameraExampleHomeState extends State ); } + Widget _buildLensPositionSettings() { + return ButtonBar( + layoutBehavior: ButtonBarLayoutBehavior.constrained, + alignment: MainAxisAlignment.center, + children: [ + ElevatedButton( + onPressed: () { + final AVFoundationCamera camera = CameraPlatform.instance as AVFoundationCamera; + setState(() { + _lenPosition = (_lenPosition + 0.1).clamp(0.0, 1.0); + }); + camera.setFocusMode(controller!.cameraId, FocusMode.locked); + camera.setLensPosition(_lenPosition); + }, + child: const Text('Increment Lens Position'), + ), + ElevatedButton( + onPressed: () { + final AVFoundationCamera camera = CameraPlatform.instance as AVFoundationCamera; + setState(() { + _lenPosition = (_lenPosition - 0.1).clamp(0.0, 1.0); + }); + camera.setFocusMode(controller!.cameraId, FocusMode.locked); + camera.setLensPosition(_lenPosition); + }, + child: const Text('Decrement Lens Position'), + ), + ], + ); + } + /// Display the preview from the camera (or a message if the preview is not available). Widget _cameraPreviewWidget() { final CameraController? cameraController = controller; @@ -281,9 +314,7 @@ class _CameraExampleHomeState extends State IconButton( icon: const Icon(Icons.exposure), color: Colors.blue, - onPressed: controller != null - ? onExposureModeButtonPressed - : null, + onPressed: controller != null ? 
onExposureModeButtonPressed : null, ), IconButton( icon: const Icon(Icons.filter_center_focus), @@ -306,9 +337,7 @@ class _CameraExampleHomeState extends State : Icons.screen_rotation, ), color: Colors.blue, - onPressed: controller != null - ? onCaptureOrientationLockButtonPressed - : null, + onPressed: controller != null ? onCaptureOrientationLockButtonPressed : null, ), ], ), @@ -328,39 +357,23 @@ class _CameraExampleHomeState extends State children: [ IconButton( icon: const Icon(Icons.flash_off), - color: controller?.value.flashMode == FlashMode.off - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? () => onSetFlashModeButtonPressed(FlashMode.off) - : null, + color: controller?.value.flashMode == FlashMode.off ? Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.off) : null, ), IconButton( icon: const Icon(Icons.flash_auto), - color: controller?.value.flashMode == FlashMode.auto - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? () => onSetFlashModeButtonPressed(FlashMode.auto) - : null, + color: controller?.value.flashMode == FlashMode.auto ? Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.auto) : null, ), IconButton( icon: const Icon(Icons.flash_on), - color: controller?.value.flashMode == FlashMode.always - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? () => onSetFlashModeButtonPressed(FlashMode.always) - : null, + color: controller?.value.flashMode == FlashMode.always ? Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.always) : null, ), IconButton( icon: const Icon(Icons.highlight), - color: controller?.value.flashMode == FlashMode.torch - ? Colors.orange - : Colors.blue, - onPressed: controller != null - ? () => onSetFlashModeButtonPressed(FlashMode.torch) - : null, + color: controller?.value.flashMode == FlashMode.torch ? 
Colors.orange : Colors.blue, + onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.torch) : null, ), ], ), @@ -370,14 +383,10 @@ class _CameraExampleHomeState extends State Widget _exposureModeControlRowWidget() { final ButtonStyle styleAuto = TextButton.styleFrom( - foregroundColor: controller?.value.exposureMode == ExposureMode.auto - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.exposureMode == ExposureMode.auto ? Colors.orange : Colors.blue, ); final ButtonStyle styleLocked = TextButton.styleFrom( - foregroundColor: controller?.value.exposureMode == ExposureMode.locked - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.exposureMode == ExposureMode.locked ? Colors.orange : Colors.blue, ); return SizeTransition( @@ -419,9 +428,7 @@ class _CameraExampleHomeState extends State ), TextButton( style: styleLocked, - onPressed: controller != null - ? () => controller!.setExposureOffset(0.0) - : null, + onPressed: controller != null ? () => controller!.setExposureOffset(0.0) : null, child: const Text('RESET OFFSET'), ), ], @@ -454,14 +461,10 @@ class _CameraExampleHomeState extends State Widget _focusModeControlRowWidget() { final ButtonStyle styleAuto = TextButton.styleFrom( - foregroundColor: controller?.value.focusMode == FocusMode.auto - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.focusMode == FocusMode.auto ? Colors.orange : Colors.blue, ); final ButtonStyle styleLocked = TextButton.styleFrom( - foregroundColor: controller?.value.focusMode == FocusMode.locked - ? Colors.orange - : Colors.blue, + foregroundColor: controller?.value.focusMode == FocusMode.locked ? Colors.orange : Colors.blue, ); return SizeTransition( @@ -477,9 +480,7 @@ class _CameraExampleHomeState extends State children: [ TextButton( style: styleAuto, - onPressed: controller != null - ? () => onSetFocusModeButtonPressed(FocusMode.auto) - : null, + onPressed: controller != null ? 
() => onSetFocusModeButtonPressed(FocusMode.auto) : null, onLongPress: () { if (controller != null) { CameraPlatform.instance.setFocusPoint( @@ -493,9 +494,7 @@ class _CameraExampleHomeState extends State ), TextButton( style: styleLocked, - onPressed: controller != null - ? () => onSetFocusModeButtonPressed(FocusMode.locked) - : null, + onPressed: controller != null ? () => onSetFocusModeButtonPressed(FocusMode.locked) : null, child: const Text('LOCKED'), ), ], diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart index 3907ed89219b..97649e00517b 100644 --- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart +++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart @@ -1,4 +1,4 @@ -// Copyright 2013 The Flutter Authors + // Copyright 2013 The Flutter Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -37,8 +37,7 @@ class AVFoundationCamera extends CameraPlatform { /// This is only exposed for test purposes. It shouldn't be used by clients of /// the plugin as it may break or change at any time. @visibleForTesting - final StreamController cameraEventStreamController = - StreamController.broadcast(); + final StreamController cameraEventStreamController = StreamController.broadcast(); /// The handler for device-level messages that should be rebroadcast to /// clients as [DeviceEvent]s. @@ -51,8 +50,7 @@ class AVFoundationCamera extends CameraPlatform { /// The per-camera handlers for messages that should be rebroadcast to /// clients as [CameraEvent]s. @visibleForTesting - final Map hostCameraHandlers = - {}; + final Map hostCameraHandlers = {}; // The stream to receive frames from the native code. StreamSubscription? 
_platformImageStreamSubscription; @@ -67,9 +65,7 @@ class AVFoundationCamera extends CameraPlatform { @override Future> availableCameras() async { try { - return (await _hostApi.getAvailableCameras()) - .map(cameraDescriptionFromPlatform) - .toList(); + return (await _hostApi.getAvailableCameras()).map(cameraDescriptionFromPlatform).toList(); } on PlatformException catch (e) { throw CameraException(e.code, e.message); } @@ -172,8 +168,7 @@ class AVFoundationCamera extends CameraPlatform { @override Stream onDeviceOrientationChanged() { - return hostHandler.deviceEventStreamController.stream - .whereType(); + return hostHandler.deviceEventStreamController.stream.whereType(); } @override @@ -191,6 +186,14 @@ class AVFoundationCamera extends CameraPlatform { await _hostApi.unlockCaptureOrientation(); } + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + Future setLensPosition(double position) async { + await _hostApi.setLensPosition(position); + } + @override Future takePicture(int cameraId) async { final String path = await _hostApi.takePicture(); @@ -623,8 +626,7 @@ class HostDeviceMessageHandler implements CameraGlobalEventApi { /// /// It is a `broadcast` because multiple controllers will connect to /// different stream views of this Controller. 
- final StreamController deviceEventStreamController = - StreamController.broadcast(); + final StreamController deviceEventStreamController = StreamController.broadcast(); @override void deviceOrientationChanged(PlatformDeviceOrientation orientation) { diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart index 46c94d58f8a1..4254c2f79273 100644 --- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart +++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart @@ -53,10 +53,8 @@ bool _deepEquals(Object? a, Object? b) { enum PlatformCameraLensDirection { /// Front facing camera (a user looking at the screen is seen by the camera). front, - /// Back facing camera (a user looking at the screen is not seen by the camera). back, - /// External camera which may not be mounted to the device. external, } @@ -481,6 +479,7 @@ class PlatformSize { int get hashCode => Object.hashAll(_toList()); } + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -488,7 +487,7 @@ class _PigeonCodec extends StandardMessageCodec { if (value is int) { buffer.putUint8(4); buffer.putInt64(value); - } else if (value is PlatformCameraLensDirection) { + } else if (value is PlatformCameraLensDirection) { buffer.putUint8(129); writeValue(buffer, value.index); } else if (value is PlatformCameraLensType) { @@ -645,8 +644,7 @@ class CameraApi { message: 'Host platform returned null value for non-null return value.', ); } else { - return (pigeonVar_replyList[0] as List?)! - .cast(); + return (pigeonVar_replyList[0] as List?)!.cast(); } } @@ -1103,6 +1101,32 @@ class CameraApi { } } + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. 
+ Future setLensPosition(double position) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_avfoundation.CameraApi.setLensPosition$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([position]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + /// Returns the minimum exposure offset supported by the camera. Future getMinExposureOffset() async { final pigeonVar_channelName = diff --git a/packages/camera/camera_avfoundation/pigeons/messages.dart b/packages/camera/camera_avfoundation/pigeons/messages.dart index 29ed9ed4b6b9..bd0a81f9a812 100644 --- a/packages/camera/camera_avfoundation/pigeons/messages.dart +++ b/packages/camera/camera_avfoundation/pigeons/messages.dart @@ -4,14 +4,17 @@ import 'package:pigeon/pigeon.dart'; -@ConfigurePigeon( - PigeonOptions( - dartOut: 'lib/src/messages.g.dart', - swiftOut: +@ConfigurePigeon(PigeonOptions( + dartOut: 'lib/src/messages.g.dart', + swiftOut: 'ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift', - copyrightHeader: 'pigeons/copyright.txt', + objcOptions: ObjcOptions( + prefix: 'FCP', + headerIncludePath: './include/camera_avfoundation/messages.g.h', ), + copyrightHeader: 'pigeons/copyright.txt', ) + // Pigeon version of CameraLensDirection. enum PlatformCameraLensDirection { /// Front facing camera (a user looking at the screen is seen by the camera). @@ -282,6 +285,14 @@ abstract class CameraApi { @ObjCSelector('setExposurePoint:') void setExposurePoint(PlatformPoint? 
point); + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + @async + @ObjCSelector('setLensPosition:') + void setLensPosition(double position); + /// Returns the minimum exposure offset supported by the camera. @async @ObjCSelector('getMinimumExposureOffset') From bc72f2800590e0eb4f429ad15fdcf3813ee134f6 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Mon, 28 Oct 2024 09:24:12 -0400 Subject: [PATCH 02/21] Update flutter_packages.code-workspace and FLTCam.m --- flutter_packages.code-workspace | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/flutter_packages.code-workspace b/flutter_packages.code-workspace index 7c4259ae73f0..bbb59b0a5b2d 100644 --- a/flutter_packages.code-workspace +++ b/flutter_packages.code-workspace @@ -6,11 +6,22 @@ { "path": "./packages/camera" }, + { + "path": "./packages/camera/camera_avfoundation" + }, { "path": "./packages/camera/camera_avfoundation" } ], "settings": { - "editor.quickSuggestions": false + "editor.quickSuggestions": { + "comments": "off", + "strings": "off", + "other": "off" + }, + "files.associations": { + "MLTable": "yaml", + "cstring": "cpp" + } } } From 325722def3e9f5f42973b70db5f7892d58b924c9 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Thu, 15 May 2025 20:32:30 -0400 Subject: [PATCH 03/21] init linux plugin --- packages/camera/camera/example/.metadata | 30 ++++ packages/camera/camera/example/README.md | 16 ++ .../camera/example/analysis_options.yaml | 28 ++++ .../camera/camera/example/linux/.gitignore | 1 + .../camera/example/linux/CMakeLists.txt | 145 +++++++++++++++++ .../example/linux/flutter/CMakeLists.txt | 88 +++++++++++ .../linux/flutter/generated_plugins.cmake | 23 +++ packages/camera/camera/example/linux/main.cc | 6 + .../camera/example/linux/my_application.cc | 124 +++++++++++++++ 
.../camera/example/linux/my_application.h | 18 +++ .../camera/example/test/widget_test.dart | 30 ++++ packages/camera/camera/pubspec.yaml | 7 + packages/camera/camera_linux/.gitignore | 29 ++++ packages/camera/camera_linux/.metadata | 30 ++++ packages/camera/camera_linux/CHANGELOG.md | 3 + packages/camera/camera_linux/LICENSE | 1 + packages/camera/camera_linux/README.md | 15 ++ .../camera/camera_linux/analysis_options.yaml | 4 + .../camera/camera_linux/example/.gitignore | 43 +++++ .../camera/camera_linux/example/README.md | 16 ++ .../example/analysis_options.yaml | 28 ++++ .../plugin_integration_test.dart | 25 +++ .../camera/camera_linux/example/lib/main.dart | 63 ++++++++ .../camera_linux/example/linux/.gitignore | 1 + .../camera_linux/example/linux/CMakeLists.txt | 147 ++++++++++++++++++ .../example/linux/flutter/CMakeLists.txt | 88 +++++++++++ .../camera/camera_linux/example/linux/main.cc | 6 + .../example/linux/my_application.cc | 124 +++++++++++++++ .../example/linux/my_application.h | 18 +++ .../camera/camera_linux/example/pubspec.yaml | 85 ++++++++++ .../example/test/widget_test.dart | 27 ++++ .../camera/camera_linux/lib/camera_linux.dart | 8 + .../lib/camera_linux_method_channel.dart | 17 ++ .../lib/camera_linux_platform_interface.dart | 29 ++++ .../camera/camera_linux/linux/CMakeLists.txt | 94 +++++++++++ .../camera_linux/linux/camera_linux_plugin.cc | 76 +++++++++ .../linux/camera_linux_plugin_private.h | 10 ++ .../camera_linux/camera_linux_plugin.h | 26 ++++ .../linux/test/camera_linux_plugin_test.cc | 31 ++++ packages/camera/camera_linux/pubspec.yaml | 68 ++++++++ .../camera_linux_method_channel_test.dart | 27 ++++ .../camera_linux/test/camera_linux_test.dart | 29 ++++ 42 files changed, 1684 insertions(+) create mode 100644 packages/camera/camera/example/.metadata create mode 100644 packages/camera/camera/example/README.md create mode 100644 packages/camera/camera/example/analysis_options.yaml create mode 100644 
packages/camera/camera/example/linux/.gitignore create mode 100644 packages/camera/camera/example/linux/CMakeLists.txt create mode 100644 packages/camera/camera/example/linux/flutter/CMakeLists.txt create mode 100644 packages/camera/camera/example/linux/flutter/generated_plugins.cmake create mode 100644 packages/camera/camera/example/linux/main.cc create mode 100644 packages/camera/camera/example/linux/my_application.cc create mode 100644 packages/camera/camera/example/linux/my_application.h create mode 100644 packages/camera/camera/example/test/widget_test.dart create mode 100644 packages/camera/camera_linux/.gitignore create mode 100644 packages/camera/camera_linux/.metadata create mode 100644 packages/camera/camera_linux/CHANGELOG.md create mode 100644 packages/camera/camera_linux/LICENSE create mode 100644 packages/camera/camera_linux/README.md create mode 100644 packages/camera/camera_linux/analysis_options.yaml create mode 100644 packages/camera/camera_linux/example/.gitignore create mode 100644 packages/camera/camera_linux/example/README.md create mode 100644 packages/camera/camera_linux/example/analysis_options.yaml create mode 100644 packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart create mode 100644 packages/camera/camera_linux/example/lib/main.dart create mode 100644 packages/camera/camera_linux/example/linux/.gitignore create mode 100644 packages/camera/camera_linux/example/linux/CMakeLists.txt create mode 100644 packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt create mode 100644 packages/camera/camera_linux/example/linux/main.cc create mode 100644 packages/camera/camera_linux/example/linux/my_application.cc create mode 100644 packages/camera/camera_linux/example/linux/my_application.h create mode 100644 packages/camera/camera_linux/example/pubspec.yaml create mode 100644 packages/camera/camera_linux/example/test/widget_test.dart create mode 100644 packages/camera/camera_linux/lib/camera_linux.dart 
create mode 100644 packages/camera/camera_linux/lib/camera_linux_method_channel.dart create mode 100644 packages/camera/camera_linux/lib/camera_linux_platform_interface.dart create mode 100644 packages/camera/camera_linux/linux/CMakeLists.txt create mode 100644 packages/camera/camera_linux/linux/camera_linux_plugin.cc create mode 100644 packages/camera/camera_linux/linux/camera_linux_plugin_private.h create mode 100644 packages/camera/camera_linux/linux/include/camera_linux/camera_linux_plugin.h create mode 100644 packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc create mode 100644 packages/camera/camera_linux/pubspec.yaml create mode 100644 packages/camera/camera_linux/test/camera_linux_method_channel_test.dart create mode 100644 packages/camera/camera_linux/test/camera_linux_test.dart diff --git a/packages/camera/camera/example/.metadata b/packages/camera/camera/example/.metadata new file mode 100644 index 000000000000..9c11960fa8d4 --- /dev/null +++ b/packages/camera/camera/example/.metadata @@ -0,0 +1,30 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. + +version: + revision: "603104015dd692ea3403755b55d07813d5cf8965" + channel: "[user-branch]" + +project_type: app + +# Tracks metadata for the flutter migrate command +migration: + platforms: + - platform: root + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + - platform: linux + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + + # User provided section + + # List of Local paths (relative to this file) that should be + # ignored by the migrate tool. + # + # Files that are not part of the templates will be ignored by default. 
+ unmanaged_files: + - 'lib/main.dart' + - 'ios/Runner.xcodeproj/project.pbxproj' diff --git a/packages/camera/camera/example/README.md b/packages/camera/camera/example/README.md new file mode 100644 index 000000000000..2b3fce4c86a5 --- /dev/null +++ b/packages/camera/camera/example/README.md @@ -0,0 +1,16 @@ +# example + +A new Flutter project. + +## Getting Started + +This project is a starting point for a Flutter application. + +A few resources to get you started if this is your first Flutter project: + +- [Lab: Write your first Flutter app](https://docs.flutter.dev/get-started/codelab) +- [Cookbook: Useful Flutter samples](https://docs.flutter.dev/cookbook) + +For help getting started with Flutter development, view the +[online documentation](https://docs.flutter.dev/), which offers tutorials, +samples, guidance on mobile development, and a full API reference. diff --git a/packages/camera/camera/example/analysis_options.yaml b/packages/camera/camera/example/analysis_options.yaml new file mode 100644 index 000000000000..0d2902135cae --- /dev/null +++ b/packages/camera/camera/example/analysis_options.yaml @@ -0,0 +1,28 @@ +# This file configures the analyzer, which statically analyzes Dart code to +# check for errors, warnings, and lints. +# +# The issues identified by the analyzer are surfaced in the UI of Dart-enabled +# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be +# invoked from the command line by running `flutter analyze`. + +# The following line activates a set of recommended lints for Flutter apps, +# packages, and plugins designed to encourage good coding practices. +include: package:flutter_lints/flutter.yaml + +linter: + # The lint rules applied to this project can be customized in the + # section below to disable rules from the `package:flutter_lints/flutter.yaml` + # included above or to enable additional rules. A list of all available lints + # and their documentation is published at https://dart.dev/lints. 
+ # + # Instead of disabling a lint rule for the entire project in the + # section below, it can also be suppressed for a single line of code + # or a specific dart file by using the `// ignore: name_of_lint` and + # `// ignore_for_file: name_of_lint` syntax on the line or in the file + # producing the lint. + rules: + # avoid_print: false # Uncomment to disable the `avoid_print` rule + # prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule + +# Additional information about this file can be found at +# https://dart.dev/guides/language/analysis-options diff --git a/packages/camera/camera/example/linux/.gitignore b/packages/camera/camera/example/linux/.gitignore new file mode 100644 index 000000000000..d3896c98444f --- /dev/null +++ b/packages/camera/camera/example/linux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral diff --git a/packages/camera/camera/example/linux/CMakeLists.txt b/packages/camera/camera/example/linux/CMakeLists.txt new file mode 100644 index 000000000000..c6d1dcf8e0e3 --- /dev/null +++ b/packages/camera/camera/example/linux/CMakeLists.txt @@ -0,0 +1,145 @@ +# Project-level configuration. +cmake_minimum_required(VERSION 3.10) +project(runner LANGUAGES CXX) + +# The name of the executable created for the application. Change this to change +# the on-disk name of your application. +set(BINARY_NAME "example") +# The unique GTK application identifier for this application. See: +# https://wiki.gnome.org/HowDoI/ChooseApplicationID +set(APPLICATION_ID "io.flutter.plugins.example") + +# Explicitly opt in to modern CMake behaviors to avoid warnings with recent +# versions of CMake. +cmake_policy(SET CMP0063 NEW) + +# Load bundled libraries from the lib/ directory relative to the binary. +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Root filesystem for cross-building. 
+if(FLUTTER_TARGET_PLATFORM_SYSROOT) + set(CMAKE_SYSROOT ${FLUTTER_TARGET_PLATFORM_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) + set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +endif() + +# Define build configuration options. +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") +endif() + +# Compilation settings that should be applied to most targets. +# +# Be cautious about adding new options here, as plugins use this function by +# default. In most cases, you should add new options to specific targets instead +# of modifying this function. +function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_14) + target_compile_options(${TARGET} PRIVATE -Wall -Werror) + target_compile_options(${TARGET} PRIVATE "$<$>:-O3>") + target_compile_definitions(${TARGET} PRIVATE "$<$>:NDEBUG>") +endfunction() + +# Flutter library and tool build rules. +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) + +add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}") + +# Define the application target. To change its name, change BINARY_NAME above, +# not the value here, or `flutter run` will no longer work. +# +# Any new source files that you add to the application should be added here. +add_executable(${BINARY_NAME} + "main.cc" + "my_application.cc" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" +) + +# Apply the standard set of build settings. This can be removed for applications +# that need different build settings. 
+apply_standard_settings(${BINARY_NAME}) + +# Add dependency libraries. Add any application-specific dependencies here. +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK) + +# Run the Flutter tool portions of the build. This must not be removed. +add_dependencies(${BINARY_NAME} flutter_assemble) + +# Only the install-generated bundle's copy of the executable will launch +# correctly, since the resources must in the right relative locations. To avoid +# people trying to run the unbundled copy, put it in a subdirectory instead of +# the default top-level location. +set_target_properties(${BINARY_NAME} + PROPERTIES + RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/intermediates_do_not_run" +) + + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. 
+install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +foreach(bundled_library ${PLUGIN_BUNDLED_LIBRARIES}) + install(FILES "${bundled_library}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endforeach(bundled_library) + +# Copy the native assets provided by the build.dart from all packages. +set(NATIVE_ASSETS_DIR "${PROJECT_BUILD_DIR}native_assets/linux/") +install(DIRECTORY "${NATIVE_ASSETS_DIR}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() diff --git a/packages/camera/camera/example/linux/flutter/CMakeLists.txt b/packages/camera/camera/example/linux/flutter/CMakeLists.txt new file mode 100644 index 000000000000..d5bd01648a96 --- /dev/null +++ b/packages/camera/camera/example/linux/flutter/CMakeLists.txt @@ -0,0 +1,88 @@ +# This file controls Flutter-level build steps. It should not be edited. 
+cmake_minimum_required(VERSION 3.10) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +# TODO: Move the rest of this into files in ephemeral. See +# https://github.com/flutter/flutter/issues/57146. + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. +function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) +pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0) +pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0) + +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_linux_gtk.so") + +# Published to parent scope for install step. 
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) +set(AOT_LIBRARY "${PROJECT_DIR}/build/lib/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "fl_basic_message_channel.h" + "fl_binary_codec.h" + "fl_binary_messenger.h" + "fl_dart_project.h" + "fl_engine.h" + "fl_json_message_codec.h" + "fl_json_method_codec.h" + "fl_message_codec.h" + "fl_method_call.h" + "fl_method_channel.h" + "fl_method_codec.h" + "fl_method_response.h" + "fl_plugin_registrar.h" + "fl_plugin_registry.h" + "fl_standard_message_codec.h" + "fl_standard_method_codec.h" + "fl_string_codec.h" + "fl_value.h" + "fl_view.h" + "flutter_linux.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/flutter_linux/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE + PkgConfig::GTK + PkgConfig::GLIB + PkgConfig::GIO +) +add_dependencies(flutter flutter_assemble) + +# === Flutter tool backend === +# _phony_ is a non-existent file to force this command to run every time, +# since currently there's no way to get a full input/output list from the +# flutter tool. 
+add_custom_command( + OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS} + ${CMAKE_CURRENT_BINARY_DIR}/_phony_ + COMMAND ${CMAKE_COMMAND} -E env + ${FLUTTER_TOOL_ENVIRONMENT} + "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.sh" + ${FLUTTER_TARGET_PLATFORM} ${CMAKE_BUILD_TYPE} + VERBATIM +) +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} +) diff --git a/packages/camera/camera/example/linux/flutter/generated_plugins.cmake b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake new file mode 100644 index 000000000000..2e1de87a7eb6 --- /dev/null +++ b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake @@ -0,0 +1,23 @@ +# +# Generated file, do not edit. +# + +list(APPEND FLUTTER_PLUGIN_LIST +) + +list(APPEND FLUTTER_FFI_PLUGIN_LIST +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/linux plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) + +foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/linux plugins/${ffi_plugin}) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries}) +endforeach(ffi_plugin) diff --git a/packages/camera/camera/example/linux/main.cc b/packages/camera/camera/example/linux/main.cc new file mode 100644 index 000000000000..e7c5c5437037 --- /dev/null +++ b/packages/camera/camera/example/linux/main.cc @@ -0,0 +1,6 @@ +#include "my_application.h" + +int main(int argc, char** argv) { + g_autoptr(MyApplication) app = my_application_new(); + return g_application_run(G_APPLICATION(app), argc, argv); +} diff --git a/packages/camera/camera/example/linux/my_application.cc b/packages/camera/camera/example/linux/my_application.cc new file 
mode 100644 index 000000000000..c0530d422cdd --- /dev/null +++ b/packages/camera/camera/example/linux/my_application.cc @@ -0,0 +1,124 @@ +#include "my_application.h" + +#include +#ifdef GDK_WINDOWING_X11 +#include +#endif + +#include "flutter/generated_plugin_registrant.h" + +struct _MyApplication { + GtkApplication parent_instance; + char** dart_entrypoint_arguments; +}; + +G_DEFINE_TYPE(MyApplication, my_application, GTK_TYPE_APPLICATION) + +// Implements GApplication::activate. +static void my_application_activate(GApplication* application) { + MyApplication* self = MY_APPLICATION(application); + GtkWindow* window = + GTK_WINDOW(gtk_application_window_new(GTK_APPLICATION(application))); + + // Use a header bar when running in GNOME as this is the common style used + // by applications and is the setup most users will be using (e.g. Ubuntu + // desktop). + // If running on X and not using GNOME then just use a traditional title bar + // in case the window manager does more exotic layout, e.g. tiling. + // If running on Wayland assume the header bar will work (may need changing + // if future cases occur). 
+ gboolean use_header_bar = TRUE; +#ifdef GDK_WINDOWING_X11 + GdkScreen* screen = gtk_window_get_screen(window); + if (GDK_IS_X11_SCREEN(screen)) { + const gchar* wm_name = gdk_x11_screen_get_window_manager_name(screen); + if (g_strcmp0(wm_name, "GNOME Shell") != 0) { + use_header_bar = FALSE; + } + } +#endif + if (use_header_bar) { + GtkHeaderBar* header_bar = GTK_HEADER_BAR(gtk_header_bar_new()); + gtk_widget_show(GTK_WIDGET(header_bar)); + gtk_header_bar_set_title(header_bar, "example"); + gtk_header_bar_set_show_close_button(header_bar, TRUE); + gtk_window_set_titlebar(window, GTK_WIDGET(header_bar)); + } else { + gtk_window_set_title(window, "example"); + } + + gtk_window_set_default_size(window, 1280, 720); + gtk_widget_show(GTK_WIDGET(window)); + + g_autoptr(FlDartProject) project = fl_dart_project_new(); + fl_dart_project_set_dart_entrypoint_arguments(project, self->dart_entrypoint_arguments); + + FlView* view = fl_view_new(project); + gtk_widget_show(GTK_WIDGET(view)); + gtk_container_add(GTK_CONTAINER(window), GTK_WIDGET(view)); + + fl_register_plugins(FL_PLUGIN_REGISTRY(view)); + + gtk_widget_grab_focus(GTK_WIDGET(view)); +} + +// Implements GApplication::local_command_line. +static gboolean my_application_local_command_line(GApplication* application, gchar*** arguments, int* exit_status) { + MyApplication* self = MY_APPLICATION(application); + // Strip out the first argument as it is the binary name. + self->dart_entrypoint_arguments = g_strdupv(*arguments + 1); + + g_autoptr(GError) error = nullptr; + if (!g_application_register(application, nullptr, &error)) { + g_warning("Failed to register: %s", error->message); + *exit_status = 1; + return TRUE; + } + + g_application_activate(application); + *exit_status = 0; + + return TRUE; +} + +// Implements GApplication::startup. +static void my_application_startup(GApplication* application) { + //MyApplication* self = MY_APPLICATION(object); + + // Perform any actions required at application startup. 
+ + G_APPLICATION_CLASS(my_application_parent_class)->startup(application); +} + +// Implements GApplication::shutdown. +static void my_application_shutdown(GApplication* application) { + //MyApplication* self = MY_APPLICATION(object); + + // Perform any actions required at application shutdown. + + G_APPLICATION_CLASS(my_application_parent_class)->shutdown(application); +} + +// Implements GObject::dispose. +static void my_application_dispose(GObject* object) { + MyApplication* self = MY_APPLICATION(object); + g_clear_pointer(&self->dart_entrypoint_arguments, g_strfreev); + G_OBJECT_CLASS(my_application_parent_class)->dispose(object); +} + +static void my_application_class_init(MyApplicationClass* klass) { + G_APPLICATION_CLASS(klass)->activate = my_application_activate; + G_APPLICATION_CLASS(klass)->local_command_line = my_application_local_command_line; + G_APPLICATION_CLASS(klass)->startup = my_application_startup; + G_APPLICATION_CLASS(klass)->shutdown = my_application_shutdown; + G_OBJECT_CLASS(klass)->dispose = my_application_dispose; +} + +static void my_application_init(MyApplication* self) {} + +MyApplication* my_application_new() { + return MY_APPLICATION(g_object_new(my_application_get_type(), + "application-id", APPLICATION_ID, + "flags", G_APPLICATION_NON_UNIQUE, + nullptr)); +} diff --git a/packages/camera/camera/example/linux/my_application.h b/packages/camera/camera/example/linux/my_application.h new file mode 100644 index 000000000000..72271d5e4170 --- /dev/null +++ b/packages/camera/camera/example/linux/my_application.h @@ -0,0 +1,18 @@ +#ifndef FLUTTER_MY_APPLICATION_H_ +#define FLUTTER_MY_APPLICATION_H_ + +#include + +G_DECLARE_FINAL_TYPE(MyApplication, my_application, MY, APPLICATION, + GtkApplication) + +/** + * my_application_new: + * + * Creates a new Flutter-based application. + * + * Returns: a new #MyApplication. 
+ */ +MyApplication* my_application_new(); + +#endif // FLUTTER_MY_APPLICATION_H_ diff --git a/packages/camera/camera/example/test/widget_test.dart b/packages/camera/camera/example/test/widget_test.dart new file mode 100644 index 000000000000..092d222f7e16 --- /dev/null +++ b/packages/camera/camera/example/test/widget_test.dart @@ -0,0 +1,30 @@ +// This is a basic Flutter widget test. +// +// To perform an interaction with a widget in your test, use the WidgetTester +// utility in the flutter_test package. For example, you can send tap and scroll +// gestures. You can also use WidgetTester to find child widgets in the widget +// tree, read text, and verify that the values of widget properties are correct. + +import 'package:flutter/material.dart'; +import 'package:flutter_test/flutter_test.dart'; + +import 'package:example/main.dart'; + +void main() { + testWidgets('Counter increments smoke test', (WidgetTester tester) async { + // Build our app and trigger a frame. + await tester.pumpWidget(const MyApp()); + + // Verify that our counter starts at 0. + expect(find.text('0'), findsOneWidget); + expect(find.text('1'), findsNothing); + + // Tap the '+' icon and trigger a frame. + await tester.tap(find.byIcon(Icons.add)); + await tester.pump(); + + // Verify that our counter has incremented. 
+ expect(find.text('0'), findsNothing); + expect(find.text('1'), findsOneWidget); + }); +} diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 1c9e8bb0b145..6401344cec70 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -19,11 +19,18 @@ flutter: default_package: camera_avfoundation web: default_package: camera_web + linux: + default_package: camera_linux dependencies: camera_android_camerax: ^0.7.0 camera_avfoundation: ^0.10.0 camera_platform_interface: ^2.12.0 + camera_linux: + git: + url: git@github.com:LightX-Innovations/flutter_packages.git + path: packages/camera/camera_linux + ref: main camera_web: ^0.3.3 flutter: sdk: flutter diff --git a/packages/camera/camera_linux/.gitignore b/packages/camera/camera_linux/.gitignore new file mode 100644 index 000000000000..ac5aa9893e48 --- /dev/null +++ b/packages/camera/camera_linux/.gitignore @@ -0,0 +1,29 @@ +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ +migrate_working_dir/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# The .vscode folder contains launch configuration and tasks you configure in +# VS Code which you may wish to be included in version control, so this line +# is commented out by default. +#.vscode/ + +# Flutter/Dart/Pub related +# Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock. +/pubspec.lock +**/doc/api/ +.dart_tool/ +build/ diff --git a/packages/camera/camera_linux/.metadata b/packages/camera/camera_linux/.metadata new file mode 100644 index 000000000000..42a96a716b35 --- /dev/null +++ b/packages/camera/camera_linux/.metadata @@ -0,0 +1,30 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: "603104015dd692ea3403755b55d07813d5cf8965" + channel: "[user-branch]" + +project_type: plugin + +# Tracks metadata for the flutter migrate command +migration: + platforms: + - platform: root + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + - platform: linux + create_revision: 603104015dd692ea3403755b55d07813d5cf8965 + base_revision: 603104015dd692ea3403755b55d07813d5cf8965 + + # User provided section + + # List of Local paths (relative to this file) that should be + # ignored by the migrate tool. + # + # Files that are not part of the templates will be ignored by default. + unmanaged_files: + - 'lib/main.dart' + - 'ios/Runner.xcodeproj/project.pbxproj' diff --git a/packages/camera/camera_linux/CHANGELOG.md b/packages/camera/camera_linux/CHANGELOG.md new file mode 100644 index 000000000000..41cc7d8192ec --- /dev/null +++ b/packages/camera/camera_linux/CHANGELOG.md @@ -0,0 +1,3 @@ +## 0.0.1 + +* TODO: Describe initial release. diff --git a/packages/camera/camera_linux/LICENSE b/packages/camera/camera_linux/LICENSE new file mode 100644 index 000000000000..ba75c69f7f21 --- /dev/null +++ b/packages/camera/camera_linux/LICENSE @@ -0,0 +1 @@ +TODO: Add your license here. diff --git a/packages/camera/camera_linux/README.md b/packages/camera/camera_linux/README.md new file mode 100644 index 000000000000..7d5a9fc073ed --- /dev/null +++ b/packages/camera/camera_linux/README.md @@ -0,0 +1,15 @@ +# camera_linux + +A new Flutter plugin project. + +## Getting Started + +This project is a starting point for a Flutter +[plug-in package](https://flutter.dev/to/develop-plugins), +a specialized package that includes platform-specific implementation code for +Android and/or iOS. + +For help getting started with Flutter development, view the +[online documentation](https://docs.flutter.dev), which offers tutorials, +samples, guidance on mobile development, and a full API reference. 
+ diff --git a/packages/camera/camera_linux/analysis_options.yaml b/packages/camera/camera_linux/analysis_options.yaml new file mode 100644 index 000000000000..a5744c1cfbe7 --- /dev/null +++ b/packages/camera/camera_linux/analysis_options.yaml @@ -0,0 +1,4 @@ +include: package:flutter_lints/flutter.yaml + +# Additional information about this file can be found at +# https://dart.dev/guides/language/analysis-options diff --git a/packages/camera/camera_linux/example/.gitignore b/packages/camera/camera_linux/example/.gitignore new file mode 100644 index 000000000000..29a3a5017f04 --- /dev/null +++ b/packages/camera/camera_linux/example/.gitignore @@ -0,0 +1,43 @@ +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ +migrate_working_dir/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# The .vscode folder contains launch configuration and tasks you configure in +# VS Code which you may wish to be included in version control, so this line +# is commented out by default. +#.vscode/ + +# Flutter/Dart/Pub related +**/doc/api/ +**/ios/Flutter/.last_build_id +.dart_tool/ +.flutter-plugins +.flutter-plugins-dependencies +.pub-cache/ +.pub/ +/build/ + +# Symbolication related +app.*.symbols + +# Obfuscation related +app.*.map.json + +# Android Studio will place build artifacts here +/android/app/debug +/android/app/profile +/android/app/release diff --git a/packages/camera/camera_linux/example/README.md b/packages/camera/camera_linux/example/README.md new file mode 100644 index 000000000000..c7c9d957cdfa --- /dev/null +++ b/packages/camera/camera_linux/example/README.md @@ -0,0 +1,16 @@ +# camera_linux_example + +Demonstrates how to use the camera_linux plugin. + +## Getting Started + +This project is a starting point for a Flutter application. 
+ +A few resources to get you started if this is your first Flutter project: + +- [Lab: Write your first Flutter app](https://docs.flutter.dev/get-started/codelab) +- [Cookbook: Useful Flutter samples](https://docs.flutter.dev/cookbook) + +For help getting started with Flutter development, view the +[online documentation](https://docs.flutter.dev/), which offers tutorials, +samples, guidance on mobile development, and a full API reference. diff --git a/packages/camera/camera_linux/example/analysis_options.yaml b/packages/camera/camera_linux/example/analysis_options.yaml new file mode 100644 index 000000000000..0d2902135cae --- /dev/null +++ b/packages/camera/camera_linux/example/analysis_options.yaml @@ -0,0 +1,28 @@ +# This file configures the analyzer, which statically analyzes Dart code to +# check for errors, warnings, and lints. +# +# The issues identified by the analyzer are surfaced in the UI of Dart-enabled +# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be +# invoked from the command line by running `flutter analyze`. + +# The following line activates a set of recommended lints for Flutter apps, +# packages, and plugins designed to encourage good coding practices. +include: package:flutter_lints/flutter.yaml + +linter: + # The lint rules applied to this project can be customized in the + # section below to disable rules from the `package:flutter_lints/flutter.yaml` + # included above or to enable additional rules. A list of all available lints + # and their documentation is published at https://dart.dev/lints. + # + # Instead of disabling a lint rule for the entire project in the + # section below, it can also be suppressed for a single line of code + # or a specific dart file by using the `// ignore: name_of_lint` and + # `// ignore_for_file: name_of_lint` syntax on the line or in the file + # producing the lint. 
+ rules: + # avoid_print: false # Uncomment to disable the `avoid_print` rule + # prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule + +# Additional information about this file can be found at +# https://dart.dev/guides/language/analysis-options diff --git a/packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart b/packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart new file mode 100644 index 000000000000..0dd45a1c096f --- /dev/null +++ b/packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart @@ -0,0 +1,25 @@ +// This is a basic Flutter integration test. +// +// Since integration tests run in a full Flutter application, they can interact +// with the host side of a plugin implementation, unlike Dart unit tests. +// +// For more information about Flutter integration tests, please see +// https://flutter.dev/to/integration-testing + + +import 'package:flutter_test/flutter_test.dart'; +import 'package:integration_test/integration_test.dart'; + +import 'package:camera_linux/camera_linux.dart'; + +void main() { + IntegrationTestWidgetsFlutterBinding.ensureInitialized(); + + testWidgets('getPlatformVersion test', (WidgetTester tester) async { + final CameraLinux plugin = CameraLinux(); + final String? version = await plugin.getPlatformVersion(); + // The version string depends on the host platform running the test, so + // just assert that some non-empty string is returned. 
+ expect(version?.isNotEmpty, true); + }); +} diff --git a/packages/camera/camera_linux/example/lib/main.dart b/packages/camera/camera_linux/example/lib/main.dart new file mode 100644 index 000000000000..1ac60e306dcc --- /dev/null +++ b/packages/camera/camera_linux/example/lib/main.dart @@ -0,0 +1,63 @@ +import 'package:flutter/material.dart'; +import 'dart:async'; + +import 'package:flutter/services.dart'; +import 'package:camera_linux/camera_linux.dart'; + +void main() { + runApp(const MyApp()); +} + +class MyApp extends StatefulWidget { + const MyApp({super.key}); + + @override + State createState() => _MyAppState(); +} + +class _MyAppState extends State { + String _platformVersion = 'Unknown'; + final _cameraLinuxPlugin = CameraLinux(); + + @override + void initState() { + super.initState(); + initPlatformState(); + } + + // Platform messages are asynchronous, so we initialize in an async method. + Future initPlatformState() async { + String platformVersion; + // Platform messages may fail, so we use a try/catch PlatformException. + // We also handle the message potentially returning null. + try { + platformVersion = + await _cameraLinuxPlugin.getPlatformVersion() ?? 'Unknown platform version'; + } on PlatformException { + platformVersion = 'Failed to get platform version.'; + } + + // If the widget was removed from the tree while the asynchronous platform + // message was in flight, we want to discard the reply rather than calling + // setState to update our non-existent appearance. 
+ if (!mounted) return; + + setState(() { + _platformVersion = platformVersion; + }); + } + + @override + Widget build(BuildContext context) { + return MaterialApp( + home: Scaffold( + appBar: AppBar( + title: const Text('Plugin example app'), + ), + body: Center( + child: Text('Running on: $_platformVersion\n'), + ), + ), + ); + } +} diff --git a/packages/camera/camera_linux/example/linux/.gitignore b/packages/camera/camera_linux/example/linux/.gitignore new file mode 100644 index 000000000000..d3896c98444f --- /dev/null +++ b/packages/camera/camera_linux/example/linux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral diff --git a/packages/camera/camera_linux/example/linux/CMakeLists.txt b/packages/camera/camera_linux/example/linux/CMakeLists.txt new file mode 100644 index 000000000000..ed12fda0cbe1 --- /dev/null +++ b/packages/camera/camera_linux/example/linux/CMakeLists.txt @@ -0,0 +1,147 @@ +# Project-level configuration. +cmake_minimum_required(VERSION 3.10) +project(runner LANGUAGES CXX) + +# The name of the executable created for the application. Change this to change +# the on-disk name of your application. +set(BINARY_NAME "camera_linux_example") +# The unique GTK application identifier for this application. See: +# https://wiki.gnome.org/HowDoI/ChooseApplicationID +set(APPLICATION_ID "com.example.camera_linux") + +# Explicitly opt in to modern CMake behaviors to avoid warnings with recent +# versions of CMake. +cmake_policy(SET CMP0063 NEW) + +# Load bundled libraries from the lib/ directory relative to the binary. +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Root filesystem for cross-building. +if(FLUTTER_TARGET_PLATFORM_SYSROOT) + set(CMAKE_SYSROOT ${FLUTTER_TARGET_PLATFORM_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) + set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +endif() + +# Define build configuration options. 
+if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") +endif() + +# Compilation settings that should be applied to most targets. +# +# Be cautious about adding new options here, as plugins use this function by +# default. In most cases, you should add new options to specific targets instead +# of modifying this function. +function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_14) + target_compile_options(${TARGET} PRIVATE -Wall -Werror) + target_compile_options(${TARGET} PRIVATE "$<$>:-O3>") + target_compile_definitions(${TARGET} PRIVATE "$<$>:NDEBUG>") +endfunction() + +# Flutter library and tool build rules. +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) + +add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}") + +# Define the application target. To change its name, change BINARY_NAME above, +# not the value here, or `flutter run` will no longer work. +# +# Any new source files that you add to the application should be added here. +add_executable(${BINARY_NAME} + "main.cc" + "my_application.cc" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" +) + +# Apply the standard set of build settings. This can be removed for applications +# that need different build settings. +apply_standard_settings(${BINARY_NAME}) + +# Add dependency libraries. Add any application-specific dependencies here. +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK) + +# Run the Flutter tool portions of the build. This must not be removed. 
+add_dependencies(${BINARY_NAME} flutter_assemble) + +# Only the install-generated bundle's copy of the executable will launch +# correctly, since the resources must in the right relative locations. To avoid +# people trying to run the unbundled copy, put it in a subdirectory instead of +# the default top-level location. +set_target_properties(${BINARY_NAME} + PROPERTIES + RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/intermediates_do_not_run" +) + +# Enable the test target. +set(include_camera_linux_tests TRUE) + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. +install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +foreach(bundled_library ${PLUGIN_BUNDLED_LIBRARIES}) + install(FILES "${bundled_library}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endforeach(bundled_library) + +# Copy the native assets provided by the build.dart from all packages. 
+set(NATIVE_ASSETS_DIR "${PROJECT_BUILD_DIR}native_assets/linux/") +install(DIRECTORY "${NATIVE_ASSETS_DIR}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() diff --git a/packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt b/packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt new file mode 100644 index 000000000000..d5bd01648a96 --- /dev/null +++ b/packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt @@ -0,0 +1,88 @@ +# This file controls Flutter-level build steps. It should not be edited. +cmake_minimum_required(VERSION 3.10) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +# TODO: Move the rest of this into files in ephemeral. See +# https://github.com/flutter/flutter/issues/57146. + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. +function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. 
+find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) +pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0) +pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0) + +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_linux_gtk.so") + +# Published to parent scope for install step. +set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) +set(AOT_LIBRARY "${PROJECT_DIR}/build/lib/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "fl_basic_message_channel.h" + "fl_binary_codec.h" + "fl_binary_messenger.h" + "fl_dart_project.h" + "fl_engine.h" + "fl_json_message_codec.h" + "fl_json_method_codec.h" + "fl_message_codec.h" + "fl_method_call.h" + "fl_method_channel.h" + "fl_method_codec.h" + "fl_method_response.h" + "fl_plugin_registrar.h" + "fl_plugin_registry.h" + "fl_standard_message_codec.h" + "fl_standard_method_codec.h" + "fl_string_codec.h" + "fl_value.h" + "fl_view.h" + "flutter_linux.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/flutter_linux/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE + PkgConfig::GTK + PkgConfig::GLIB + PkgConfig::GIO +) +add_dependencies(flutter flutter_assemble) + +# === Flutter tool backend === +# _phony_ is a non-existent file to force this command to run every time, +# since currently there's no way to get a full input/output list from the +# flutter tool. 
+add_custom_command( + OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS} + ${CMAKE_CURRENT_BINARY_DIR}/_phony_ + COMMAND ${CMAKE_COMMAND} -E env + ${FLUTTER_TOOL_ENVIRONMENT} + "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.sh" + ${FLUTTER_TARGET_PLATFORM} ${CMAKE_BUILD_TYPE} + VERBATIM +) +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} +) diff --git a/packages/camera/camera_linux/example/linux/main.cc b/packages/camera/camera_linux/example/linux/main.cc new file mode 100644 index 000000000000..e7c5c5437037 --- /dev/null +++ b/packages/camera/camera_linux/example/linux/main.cc @@ -0,0 +1,6 @@ +#include "my_application.h" + +int main(int argc, char** argv) { + g_autoptr(MyApplication) app = my_application_new(); + return g_application_run(G_APPLICATION(app), argc, argv); +} diff --git a/packages/camera/camera_linux/example/linux/my_application.cc b/packages/camera/camera_linux/example/linux/my_application.cc new file mode 100644 index 000000000000..535ff33adbc2 --- /dev/null +++ b/packages/camera/camera_linux/example/linux/my_application.cc @@ -0,0 +1,124 @@ +#include "my_application.h" + +#include +#ifdef GDK_WINDOWING_X11 +#include +#endif + +#include "flutter/generated_plugin_registrant.h" + +struct _MyApplication { + GtkApplication parent_instance; + char** dart_entrypoint_arguments; +}; + +G_DEFINE_TYPE(MyApplication, my_application, GTK_TYPE_APPLICATION) + +// Implements GApplication::activate. +static void my_application_activate(GApplication* application) { + MyApplication* self = MY_APPLICATION(application); + GtkWindow* window = + GTK_WINDOW(gtk_application_window_new(GTK_APPLICATION(application))); + + // Use a header bar when running in GNOME as this is the common style used + // by applications and is the setup most users will be using (e.g. Ubuntu + // desktop). 
+ // If running on X and not using GNOME then just use a traditional title bar + // in case the window manager does more exotic layout, e.g. tiling. + // If running on Wayland assume the header bar will work (may need changing + // if future cases occur). + gboolean use_header_bar = TRUE; +#ifdef GDK_WINDOWING_X11 + GdkScreen* screen = gtk_window_get_screen(window); + if (GDK_IS_X11_SCREEN(screen)) { + const gchar* wm_name = gdk_x11_screen_get_window_manager_name(screen); + if (g_strcmp0(wm_name, "GNOME Shell") != 0) { + use_header_bar = FALSE; + } + } +#endif + if (use_header_bar) { + GtkHeaderBar* header_bar = GTK_HEADER_BAR(gtk_header_bar_new()); + gtk_widget_show(GTK_WIDGET(header_bar)); + gtk_header_bar_set_title(header_bar, "camera_linux_example"); + gtk_header_bar_set_show_close_button(header_bar, TRUE); + gtk_window_set_titlebar(window, GTK_WIDGET(header_bar)); + } else { + gtk_window_set_title(window, "camera_linux_example"); + } + + gtk_window_set_default_size(window, 1280, 720); + gtk_widget_show(GTK_WIDGET(window)); + + g_autoptr(FlDartProject) project = fl_dart_project_new(); + fl_dart_project_set_dart_entrypoint_arguments(project, self->dart_entrypoint_arguments); + + FlView* view = fl_view_new(project); + gtk_widget_show(GTK_WIDGET(view)); + gtk_container_add(GTK_CONTAINER(window), GTK_WIDGET(view)); + + fl_register_plugins(FL_PLUGIN_REGISTRY(view)); + + gtk_widget_grab_focus(GTK_WIDGET(view)); +} + +// Implements GApplication::local_command_line. +static gboolean my_application_local_command_line(GApplication* application, gchar*** arguments, int* exit_status) { + MyApplication* self = MY_APPLICATION(application); + // Strip out the first argument as it is the binary name. 
+ self->dart_entrypoint_arguments = g_strdupv(*arguments + 1); + + g_autoptr(GError) error = nullptr; + if (!g_application_register(application, nullptr, &error)) { + g_warning("Failed to register: %s", error->message); + *exit_status = 1; + return TRUE; + } + + g_application_activate(application); + *exit_status = 0; + + return TRUE; +} + +// Implements GApplication::startup. +static void my_application_startup(GApplication* application) { + //MyApplication* self = MY_APPLICATION(object); + + // Perform any actions required at application startup. + + G_APPLICATION_CLASS(my_application_parent_class)->startup(application); +} + +// Implements GApplication::shutdown. +static void my_application_shutdown(GApplication* application) { + //MyApplication* self = MY_APPLICATION(object); + + // Perform any actions required at application shutdown. + + G_APPLICATION_CLASS(my_application_parent_class)->shutdown(application); +} + +// Implements GObject::dispose. +static void my_application_dispose(GObject* object) { + MyApplication* self = MY_APPLICATION(object); + g_clear_pointer(&self->dart_entrypoint_arguments, g_strfreev); + G_OBJECT_CLASS(my_application_parent_class)->dispose(object); +} + +static void my_application_class_init(MyApplicationClass* klass) { + G_APPLICATION_CLASS(klass)->activate = my_application_activate; + G_APPLICATION_CLASS(klass)->local_command_line = my_application_local_command_line; + G_APPLICATION_CLASS(klass)->startup = my_application_startup; + G_APPLICATION_CLASS(klass)->shutdown = my_application_shutdown; + G_OBJECT_CLASS(klass)->dispose = my_application_dispose; +} + +static void my_application_init(MyApplication* self) {} + +MyApplication* my_application_new() { + return MY_APPLICATION(g_object_new(my_application_get_type(), + "application-id", APPLICATION_ID, + "flags", G_APPLICATION_NON_UNIQUE, + nullptr)); +} diff --git a/packages/camera/camera_linux/example/linux/my_application.h 
b/packages/camera/camera_linux/example/linux/my_application.h new file mode 100644 index 000000000000..72271d5e4170 --- /dev/null +++ b/packages/camera/camera_linux/example/linux/my_application.h @@ -0,0 +1,18 @@ +#ifndef FLUTTER_MY_APPLICATION_H_ +#define FLUTTER_MY_APPLICATION_H_ + +#include + +G_DECLARE_FINAL_TYPE(MyApplication, my_application, MY, APPLICATION, + GtkApplication) + +/** + * my_application_new: + * + * Creates a new Flutter-based application. + * + * Returns: a new #MyApplication. + */ +MyApplication* my_application_new(); + +#endif // FLUTTER_MY_APPLICATION_H_ diff --git a/packages/camera/camera_linux/example/pubspec.yaml b/packages/camera/camera_linux/example/pubspec.yaml new file mode 100644 index 000000000000..1079904426f4 --- /dev/null +++ b/packages/camera/camera_linux/example/pubspec.yaml @@ -0,0 +1,85 @@ +name: camera_linux_example +description: "Demonstrates how to use the camera_linux plugin." +# The following line prevents the package from being accidentally published to +# pub.dev using `flutter pub publish`. This is preferred for private packages. +publish_to: 'none' # Remove this line if you wish to publish to pub.dev + +environment: + sdk: ^3.5.4 + +# Dependencies specify other packages that your package needs in order to work. +# To automatically upgrade your package dependencies to the latest versions +# consider running `flutter pub upgrade --major-versions`. Alternatively, +# dependencies can be manually updated by changing the version numbers below to +# the latest version available on pub.dev. To see which dependencies have newer +# versions available, run `flutter pub outdated`. 
+dependencies: + flutter: + sdk: flutter + + camera_linux: + # When depending on this package from a real application you should use: + # camera_linux: ^x.y.z + # See https://dart.dev/tools/pub/dependencies#version-constraints + # The example app is bundled with the plugin so we use a path dependency on + # the parent directory to use the current plugin's version. + path: ../ + + # The following adds the Cupertino Icons font to your application. + # Use with the CupertinoIcons class for iOS style icons. + cupertino_icons: ^1.0.8 + +dev_dependencies: + integration_test: + sdk: flutter + flutter_test: + sdk: flutter + + # The "flutter_lints" package below contains a set of recommended lints to + # encourage good coding practices. The lint set provided by the package is + # activated in the `analysis_options.yaml` file located at the root of your + # package. See that file for information about deactivating specific lint + # rules and activating additional ones. + flutter_lints: ^4.0.0 + +# For information on the generic Dart part of this file, see the +# following page: https://dart.dev/tools/pub/pubspec + +# The following section is specific to Flutter packages. +flutter: + + # The following line ensures that the Material Icons font is + # included with your application, so that you can use the icons in + # the material Icons class. + uses-material-design: true + + # To add assets to your application, add an assets section, like this: + # assets: + # - images/a_dot_burr.jpeg + # - images/a_dot_ham.jpeg + + # An image asset can refer to one or more resolution-specific "variants", see + # https://flutter.dev/to/resolution-aware-images + + # For details regarding adding assets from package dependencies, see + # https://flutter.dev/to/asset-from-package + + # To add custom fonts to your application, add a fonts section here, + # in this "flutter" section. 
Each entry in this list should have a + # "family" key with the font family name, and a "fonts" key with a + # list giving the asset and other descriptors for the font. For + # example: + # fonts: + # - family: Schyler + # fonts: + # - asset: fonts/Schyler-Regular.ttf + # - asset: fonts/Schyler-Italic.ttf + # style: italic + # - family: Trajan Pro + # fonts: + # - asset: fonts/TrajanPro.ttf + # - asset: fonts/TrajanPro_Bold.ttf + # weight: 700 + # + # For details regarding fonts from package dependencies, + # see https://flutter.dev/to/font-from-package diff --git a/packages/camera/camera_linux/example/test/widget_test.dart b/packages/camera/camera_linux/example/test/widget_test.dart new file mode 100644 index 000000000000..566570b7e9f4 --- /dev/null +++ b/packages/camera/camera_linux/example/test/widget_test.dart @@ -0,0 +1,27 @@ +// This is a basic Flutter widget test. +// +// To perform an interaction with a widget in your test, use the WidgetTester +// utility in the flutter_test package. For example, you can send tap and scroll +// gestures. You can also use WidgetTester to find child widgets in the widget +// tree, read text, and verify that the values of widget properties are correct. + +import 'package:flutter/material.dart'; +import 'package:flutter_test/flutter_test.dart'; + +import 'package:camera_linux_example/main.dart'; + +void main() { + testWidgets('Verify Platform version', (WidgetTester tester) async { + // Build our app and trigger a frame. + await tester.pumpWidget(const MyApp()); + + // Verify that platform version is retrieved. 
+ expect( + find.byWidgetPredicate( + (Widget widget) => widget is Text && + widget.data!.startsWith('Running on:'), + ), + findsOneWidget, + ); + }); +} diff --git a/packages/camera/camera_linux/lib/camera_linux.dart b/packages/camera/camera_linux/lib/camera_linux.dart new file mode 100644 index 000000000000..ca7b21fb01e0 --- /dev/null +++ b/packages/camera/camera_linux/lib/camera_linux.dart @@ -0,0 +1,8 @@ + +import 'camera_linux_platform_interface.dart'; + +class CameraLinux { + Future getPlatformVersion() { + return CameraLinuxPlatform.instance.getPlatformVersion(); + } +} diff --git a/packages/camera/camera_linux/lib/camera_linux_method_channel.dart b/packages/camera/camera_linux/lib/camera_linux_method_channel.dart new file mode 100644 index 000000000000..5332ffd00949 --- /dev/null +++ b/packages/camera/camera_linux/lib/camera_linux_method_channel.dart @@ -0,0 +1,17 @@ +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'camera_linux_platform_interface.dart'; + +/// An implementation of [CameraLinuxPlatform] that uses method channels. +class MethodChannelCameraLinux extends CameraLinuxPlatform { + /// The method channel used to interact with the native platform. 
+ @visibleForTesting + final methodChannel = const MethodChannel('camera_linux'); + + @override + Future getPlatformVersion() async { + final version = await methodChannel.invokeMethod('getPlatformVersion'); + return version; + } +} diff --git a/packages/camera/camera_linux/lib/camera_linux_platform_interface.dart b/packages/camera/camera_linux/lib/camera_linux_platform_interface.dart new file mode 100644 index 000000000000..6043c9e54a2c --- /dev/null +++ b/packages/camera/camera_linux/lib/camera_linux_platform_interface.dart @@ -0,0 +1,29 @@ +import 'package:plugin_platform_interface/plugin_platform_interface.dart'; + +import 'camera_linux_method_channel.dart'; + +abstract class CameraLinuxPlatform extends PlatformInterface { + /// Constructs a CameraLinuxPlatform. + CameraLinuxPlatform() : super(token: _token); + + static final Object _token = Object(); + + static CameraLinuxPlatform _instance = MethodChannelCameraLinux(); + + /// The default instance of [CameraLinuxPlatform] to use. + /// + /// Defaults to [MethodChannelCameraLinux]. + static CameraLinuxPlatform get instance => _instance; + + /// Platform-specific implementations should set this with their own + /// platform-specific class that extends [CameraLinuxPlatform] when + /// they register themselves. + static set instance(CameraLinuxPlatform instance) { + PlatformInterface.verifyToken(instance, _token); + _instance = instance; + } + + Future getPlatformVersion() { + throw UnimplementedError('platformVersion() has not been implemented.'); + } +} diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt new file mode 100644 index 000000000000..80689ad7a9de --- /dev/null +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -0,0 +1,94 @@ +# The Flutter tooling requires that developers have CMake 3.10 or later +# installed. 
You should not increase this version, as doing so will cause +# the plugin to fail to compile for some customers of the plugin. +cmake_minimum_required(VERSION 3.10) + +# Project-level configuration. +set(PROJECT_NAME "camera_linux") +project(${PROJECT_NAME} LANGUAGES CXX) + +# This value is used when generating builds using this plugin, so it must +# not be changed. +set(PLUGIN_NAME "camera_linux_plugin") + +# Any new source files that you add to the plugin should be added here. +list(APPEND PLUGIN_SOURCES + "camera_linux_plugin.cc" +) + +# Define the plugin library target. Its name must not be changed (see comment +# on PLUGIN_NAME above). +add_library(${PLUGIN_NAME} SHARED + ${PLUGIN_SOURCES} +) + +# Apply a standard set of build settings that are configured in the +# application-level CMakeLists.txt. This can be removed for plugins that want +# full control over build settings. +apply_standard_settings(${PLUGIN_NAME}) + +# Symbols are hidden by default to reduce the chance of accidental conflicts +# between plugins. This should not be removed; any symbols that should be +# exported should be explicitly exported with the FLUTTER_PLUGIN_EXPORT macro. +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) + +# Source include directories and library dependencies. Add any plugin-specific +# dependencies here. +target_include_directories(${PLUGIN_NAME} INTERFACE + "${CMAKE_CURRENT_SOURCE_DIR}/include") +target_link_libraries(${PLUGIN_NAME} PRIVATE flutter) +target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::GTK) + +# List of absolute paths to libraries that should be bundled with the plugin. +# This list could contain prebuilt libraries, or libraries created by an +# external build triggered from this build file. +set(camera_linux_bundled_libraries + "" + PARENT_SCOPE +) + +# === Tests === +# These unit tests can be run from a terminal after building the example. 
+ +# Only enable test builds when building the example (which sets this variable) +# so that plugin clients aren't building the tests. +if (${include_${PROJECT_NAME}_tests}) +if(${CMAKE_VERSION} VERSION_LESS "3.11.0") +message("Unit tests require CMake 3.11.0 or later") +else() +set(TEST_RUNNER "${PROJECT_NAME}_test") +enable_testing() + +# Add the Google Test dependency. +include(FetchContent) +FetchContent_Declare( + googletest + URL https://github.com/google/googletest/archive/release-1.11.0.zip +) +# Prevent overriding the parent project's compiler/linker settings +set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) +# Disable install commands for gtest so it doesn't end up in the bundle. +set(INSTALL_GTEST OFF CACHE BOOL "Disable installation of googletest" FORCE) + +FetchContent_MakeAvailable(googletest) + +# The plugin's exported API is not very useful for unit testing, so build the +# sources directly into the test binary rather than using the shared library. +add_executable(${TEST_RUNNER} + test/camera_linux_plugin_test.cc + ${PLUGIN_SOURCES} +) +apply_standard_settings(${TEST_RUNNER}) +target_include_directories(${TEST_RUNNER} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}") +target_link_libraries(${TEST_RUNNER} PRIVATE flutter) +target_link_libraries(${TEST_RUNNER} PRIVATE PkgConfig::GTK) +target_link_libraries(${TEST_RUNNER} PRIVATE gtest_main gmock) + +# Enable automatic test discovery. 
+include(GoogleTest) +gtest_discover_tests(${TEST_RUNNER}) + +endif() # CMake version check +endif() # include_${PROJECT_NAME}_tests \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/camera_linux_plugin.cc b/packages/camera/camera_linux/linux/camera_linux_plugin.cc new file mode 100644 index 000000000000..d3599d644965 --- /dev/null +++ b/packages/camera/camera_linux/linux/camera_linux_plugin.cc @@ -0,0 +1,76 @@ +#include "include/camera_linux/camera_linux_plugin.h" + +#include +#include +#include + +#include + +#include "camera_linux_plugin_private.h" + +#define CAMERA_LINUX_PLUGIN(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), camera_linux_plugin_get_type(), \ + CameraLinuxPlugin)) + +struct _CameraLinuxPlugin { + GObject parent_instance; +}; + +G_DEFINE_TYPE(CameraLinuxPlugin, camera_linux_plugin, g_object_get_type()) + +// Called when a method call is received from Flutter. +static void camera_linux_plugin_handle_method_call( + CameraLinuxPlugin* self, + FlMethodCall* method_call) { + g_autoptr(FlMethodResponse) response = nullptr; + + const gchar* method = fl_method_call_get_name(method_call); + + if (strcmp(method, "getPlatformVersion") == 0) { + response = get_platform_version(); + } else { + response = FL_METHOD_RESPONSE(fl_method_not_implemented_response_new()); + } + + fl_method_call_respond(method_call, response, nullptr); +} + +FlMethodResponse* get_platform_version() { + struct utsname uname_data = {}; + uname(&uname_data); + g_autofree gchar *version = g_strdup_printf("Linux %s", uname_data.version); + g_autoptr(FlValue) result = fl_value_new_string(version); + return FL_METHOD_RESPONSE(fl_method_success_response_new(result)); +} + +static void camera_linux_plugin_dispose(GObject* object) { + G_OBJECT_CLASS(camera_linux_plugin_parent_class)->dispose(object); +} + +static void camera_linux_plugin_class_init(CameraLinuxPluginClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_plugin_dispose; +} + +static void 
camera_linux_plugin_init(CameraLinuxPlugin* self) {} + +static void method_call_cb(FlMethodChannel* channel, FlMethodCall* method_call, + gpointer user_data) { + CameraLinuxPlugin* plugin = CAMERA_LINUX_PLUGIN(user_data); + camera_linux_plugin_handle_method_call(plugin, method_call); +} + +void camera_linux_plugin_register_with_registrar(FlPluginRegistrar* registrar) { + CameraLinuxPlugin* plugin = CAMERA_LINUX_PLUGIN( + g_object_new(camera_linux_plugin_get_type(), nullptr)); + + g_autoptr(FlStandardMethodCodec) codec = fl_standard_method_codec_new(); + g_autoptr(FlMethodChannel) channel = + fl_method_channel_new(fl_plugin_registrar_get_messenger(registrar), + "camera_linux", + FL_METHOD_CODEC(codec)); + fl_method_channel_set_method_call_handler(channel, method_call_cb, + g_object_ref(plugin), + g_object_unref); + + g_object_unref(plugin); +} diff --git a/packages/camera/camera_linux/linux/camera_linux_plugin_private.h b/packages/camera/camera_linux/linux/camera_linux_plugin_private.h new file mode 100644 index 000000000000..1e590da5dbd4 --- /dev/null +++ b/packages/camera/camera_linux/linux/camera_linux_plugin_private.h @@ -0,0 +1,10 @@ +#include + +#include "include/camera_linux/camera_linux_plugin.h" + +// This file exposes some plugin internals for unit testing. See +// https://github.com/flutter/flutter/issues/88724 for current limitations +// in the unit-testable API. + +// Handles the getPlatformVersion method call. 
+FlMethodResponse *get_platform_version(); diff --git a/packages/camera/camera_linux/linux/include/camera_linux/camera_linux_plugin.h b/packages/camera/camera_linux/linux/include/camera_linux/camera_linux_plugin.h new file mode 100644 index 000000000000..26c0ea39d8e0 --- /dev/null +++ b/packages/camera/camera_linux/linux/include/camera_linux/camera_linux_plugin.h @@ -0,0 +1,26 @@ +#ifndef FLUTTER_PLUGIN_CAMERA_LINUX_PLUGIN_H_ +#define FLUTTER_PLUGIN_CAMERA_LINUX_PLUGIN_H_ + +#include + +G_BEGIN_DECLS + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default"))) +#else +#define FLUTTER_PLUGIN_EXPORT +#endif + +typedef struct _CameraLinuxPlugin CameraLinuxPlugin; +typedef struct { + GObjectClass parent_class; +} CameraLinuxPluginClass; + +FLUTTER_PLUGIN_EXPORT GType camera_linux_plugin_get_type(); + +FLUTTER_PLUGIN_EXPORT void camera_linux_plugin_register_with_registrar( + FlPluginRegistrar* registrar); + +G_END_DECLS + +#endif // FLUTTER_PLUGIN_CAMERA_LINUX_PLUGIN_H_ diff --git a/packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc b/packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc new file mode 100644 index 000000000000..a3771a632cb6 --- /dev/null +++ b/packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc @@ -0,0 +1,31 @@ +#include +#include +#include + +#include "include/camera_linux/camera_linux_plugin.h" +#include "camera_linux_plugin_private.h" + +// This demonstrates a simple unit test of the C portion of this plugin's +// implementation. +// +// Once you have built the plugin's example app, you can run these tests +// from the command line. 
For instance, for a plugin called my_plugin +// built for x64 debug, run: +// $ build/linux/x64/debug/plugins/my_plugin/my_plugin_test + +namespace camera_linux { +namespace test { + +TEST(CameraLinuxPlugin, GetPlatformVersion) { + g_autoptr(FlMethodResponse) response = get_platform_version(); + ASSERT_NE(response, nullptr); + ASSERT_TRUE(FL_IS_METHOD_SUCCESS_RESPONSE(response)); + FlValue* result = fl_method_success_response_get_result( + FL_METHOD_SUCCESS_RESPONSE(response)); + ASSERT_EQ(fl_value_get_type(result), FL_VALUE_TYPE_STRING); + // The full string varies, so just validate that it has the right format. + EXPECT_THAT(fl_value_get_string(result), testing::StartsWith("Linux ")); +} + +} // namespace test +} // namespace camera_linux diff --git a/packages/camera/camera_linux/pubspec.yaml b/packages/camera/camera_linux/pubspec.yaml new file mode 100644 index 000000000000..b0a978cdf8b8 --- /dev/null +++ b/packages/camera/camera_linux/pubspec.yaml @@ -0,0 +1,68 @@ +name: camera_linux +description: "Linux implementation of the camera plugin." +version: 0.0.1 + +environment: + sdk: ^3.5.4 + flutter: ">=3.3.0" + +dependencies: + flutter: + sdk: flutter + plugin_platform_interface: ^2.0.2 + +dev_dependencies: + flutter_test: + sdk: flutter + flutter_lints: ^4.0.0 + +# For information on the generic Dart part of this file, see the +# following page: https://dart.dev/tools/pub/pubspec + +# The following section is specific to Flutter packages. +flutter: + # This section identifies this Flutter project as a plugin project. + # The 'pluginClass' specifies the class (in Java, Kotlin, Swift, Objective-C, etc.) + # which should be registered in the plugin registry. This is required for + # using method channels. + # The Android 'package' specifies package in which the registered class is. + # This is required for using method channels on Android. + # The 'ffiPlugin' specifies that native code should be built and bundled. + # This is required for using `dart:ffi`. 
+ # All these are used by the tooling to maintain consistency when + # adding or updating assets for this project. + plugin: + platforms: + linux: + pluginClass: CameraLinuxPlugin + + # To add assets to your plugin package, add an assets section, like this: + # assets: + # - images/a_dot_burr.jpeg + # - images/a_dot_ham.jpeg + # + # For details regarding assets in packages, see + # https://flutter.dev/to/asset-from-package + # + # An image asset can refer to one or more resolution-specific "variants", see + # https://flutter.dev/to/resolution-aware-images + + # To add custom fonts to your plugin package, add a fonts section here, + # in this "flutter" section. Each entry in this list should have a + # "family" key with the font family name, and a "fonts" key with a + # list giving the asset and other descriptors for the font. For + # example: + # fonts: + # - family: Schyler + # fonts: + # - asset: fonts/Schyler-Regular.ttf + # - asset: fonts/Schyler-Italic.ttf + # style: italic + # - family: Trajan Pro + # fonts: + # - asset: fonts/TrajanPro.ttf + # - asset: fonts/TrajanPro_Bold.ttf + # weight: 700 + # + # For details regarding fonts in packages, see + # https://flutter.dev/to/font-from-package diff --git a/packages/camera/camera_linux/test/camera_linux_method_channel_test.dart b/packages/camera/camera_linux/test/camera_linux_method_channel_test.dart new file mode 100644 index 000000000000..7ab5d5289a45 --- /dev/null +++ b/packages/camera/camera_linux/test/camera_linux_method_channel_test.dart @@ -0,0 +1,27 @@ +import 'package:flutter/services.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:camera_linux/camera_linux_method_channel.dart'; + +void main() { + TestWidgetsFlutterBinding.ensureInitialized(); + + MethodChannelCameraLinux platform = MethodChannelCameraLinux(); + const MethodChannel channel = MethodChannel('camera_linux'); + + setUp(() { + TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger.setMockMethodCallHandler( 
+ channel, + (MethodCall methodCall) async { + return '42'; + }, + ); + }); + + tearDown(() { + TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger.setMockMethodCallHandler(channel, null); + }); + + test('getPlatformVersion', () async { + expect(await platform.getPlatformVersion(), '42'); + }); +} diff --git a/packages/camera/camera_linux/test/camera_linux_test.dart b/packages/camera/camera_linux/test/camera_linux_test.dart new file mode 100644 index 000000000000..6dec7f8cf8c3 --- /dev/null +++ b/packages/camera/camera_linux/test/camera_linux_test.dart @@ -0,0 +1,29 @@ +import 'package:flutter_test/flutter_test.dart'; +import 'package:camera_linux/camera_linux.dart'; +import 'package:camera_linux/camera_linux_platform_interface.dart'; +import 'package:camera_linux/camera_linux_method_channel.dart'; +import 'package:plugin_platform_interface/plugin_platform_interface.dart'; + +class MockCameraLinuxPlatform + with MockPlatformInterfaceMixin + implements CameraLinuxPlatform { + + @override + Future getPlatformVersion() => Future.value('42'); +} + +void main() { + final CameraLinuxPlatform initialPlatform = CameraLinuxPlatform.instance; + + test('$MethodChannelCameraLinux is the default instance', () { + expect(initialPlatform, isInstanceOf()); + }); + + test('getPlatformVersion', () async { + CameraLinux cameraLinuxPlugin = CameraLinux(); + MockCameraLinuxPlatform fakePlatform = MockCameraLinuxPlatform(); + CameraLinuxPlatform.instance = fakePlatform; + + expect(await cameraLinuxPlugin.getPlatformVersion(), '42'); + }); +} From 7ba6b41d349b571bf31937c05ba5dd3043aa8d39 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Fri, 16 May 2025 08:23:55 -0400 Subject: [PATCH 04/21] camera plugin linked --- .../linux/flutter/generated_plugins.cmake | 1 + packages/camera/camera/example/pubspec.yaml | 6 + .../camera/camera_linux/example/.gitignore | 43 - .../camera/camera_linux/example/README.md | 16 - .../example/analysis_options.yaml | 28 - 
.../plugin_integration_test.dart | 25 - .../camera/camera_linux/example/lib/main.dart | 63 - .../camera_linux/example/linux/.gitignore | 1 - .../camera_linux/example/linux/CMakeLists.txt | 147 - .../example/linux/flutter/CMakeLists.txt | 88 - .../camera/camera_linux/example/linux/main.cc | 6 - .../example/linux/my_application.cc | 124 - .../example/linux/my_application.h | 18 - .../camera/camera_linux/example/pubspec.yaml | 85 - .../example/test/widget_test.dart | 27 - .../camera/camera_linux/lib/camera_linux.dart | 9 +- .../lib/camera_linux_method_channel.dart | 17 - .../lib/camera_linux_platform_interface.dart | 29 - .../camera_linux/lib/src/linux_camera.dart | 26 + .../camera_linux/lib/src/messages.g.dart | 1234 +++++++ .../camera/camera_linux/linux/CMakeLists.txt | 51 +- .../camera_linux/linux/camera_linux_plugin.cc | 76 - .../linux/camera_linux_plugin_private.h | 10 - .../camera_linux/linux/camera_plugin.cpp | 3 + ...{camera_linux_plugin.h => camera_plugin.h} | 9 +- .../camera/camera_linux/linux/messages.g.cc | 3286 +++++++++++++++++ .../camera/camera_linux/linux/messages.g.h | 1288 +++++++ .../linux/test/camera_linux_plugin_test.cc | 31 - .../camera/camera_linux/pigeons/copyright.txt | 3 + .../camera/camera_linux/pigeons/messages.dart | 316 ++ packages/camera/camera_linux/pubspec.yaml | 5 +- .../camera_linux_method_channel_test.dart | 27 - .../camera_linux/test/camera_linux_test.dart | 29 - 33 files changed, 6174 insertions(+), 953 deletions(-) delete mode 100644 packages/camera/camera_linux/example/.gitignore delete mode 100644 packages/camera/camera_linux/example/README.md delete mode 100644 packages/camera/camera_linux/example/analysis_options.yaml delete mode 100644 packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart delete mode 100644 packages/camera/camera_linux/example/lib/main.dart delete mode 100644 packages/camera/camera_linux/example/linux/.gitignore delete mode 100644 
packages/camera/camera_linux/example/linux/CMakeLists.txt delete mode 100644 packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt delete mode 100644 packages/camera/camera_linux/example/linux/main.cc delete mode 100644 packages/camera/camera_linux/example/linux/my_application.cc delete mode 100644 packages/camera/camera_linux/example/linux/my_application.h delete mode 100644 packages/camera/camera_linux/example/pubspec.yaml delete mode 100644 packages/camera/camera_linux/example/test/widget_test.dart delete mode 100644 packages/camera/camera_linux/lib/camera_linux_method_channel.dart delete mode 100644 packages/camera/camera_linux/lib/camera_linux_platform_interface.dart create mode 100644 packages/camera/camera_linux/lib/src/linux_camera.dart create mode 100644 packages/camera/camera_linux/lib/src/messages.g.dart delete mode 100644 packages/camera/camera_linux/linux/camera_linux_plugin.cc delete mode 100644 packages/camera/camera_linux/linux/camera_linux_plugin_private.h create mode 100644 packages/camera/camera_linux/linux/camera_plugin.cpp rename packages/camera/camera_linux/linux/include/camera_linux/{camera_linux_plugin.h => camera_plugin.h} (59%) create mode 100644 packages/camera/camera_linux/linux/messages.g.cc create mode 100644 packages/camera/camera_linux/linux/messages.g.h delete mode 100644 packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc create mode 100644 packages/camera/camera_linux/pigeons/copyright.txt create mode 100644 packages/camera/camera_linux/pigeons/messages.dart delete mode 100644 packages/camera/camera_linux/test/camera_linux_method_channel_test.dart delete mode 100644 packages/camera/camera_linux/test/camera_linux_test.dart diff --git a/packages/camera/camera/example/linux/flutter/generated_plugins.cmake b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake index 2e1de87a7eb6..1aa0c3eb78bd 100644 --- a/packages/camera/camera/example/linux/flutter/generated_plugins.cmake +++ 
b/packages/camera/camera/example/linux/flutter/generated_plugins.cmake @@ -3,6 +3,7 @@ # list(APPEND FLUTTER_PLUGIN_LIST + camera_linux ) list(APPEND FLUTTER_FFI_PLUGIN_LIST diff --git a/packages/camera/camera/example/pubspec.yaml b/packages/camera/camera/example/pubspec.yaml index b5d9dff6e913..62fde6862a14 100644 --- a/packages/camera/camera/example/pubspec.yaml +++ b/packages/camera/camera/example/pubspec.yaml @@ -29,5 +29,11 @@ dev_dependencies: sdk: flutter leak_tracker_flutter_testing: any +dependency_overrides: + camera_web: + path: ../../camera_web + camera_linux: + path: ../../camera_linux + flutter: uses-material-design: true diff --git a/packages/camera/camera_linux/example/.gitignore b/packages/camera/camera_linux/example/.gitignore deleted file mode 100644 index 29a3a5017f04..000000000000 --- a/packages/camera/camera_linux/example/.gitignore +++ /dev/null @@ -1,43 +0,0 @@ -# Miscellaneous -*.class -*.log -*.pyc -*.swp -.DS_Store -.atom/ -.buildlog/ -.history -.svn/ -migrate_working_dir/ - -# IntelliJ related -*.iml -*.ipr -*.iws -.idea/ - -# The .vscode folder contains launch configuration and tasks you configure in -# VS Code which you may wish to be included in version control, so this line -# is commented out by default. -#.vscode/ - -# Flutter/Dart/Pub related -**/doc/api/ -**/ios/Flutter/.last_build_id -.dart_tool/ -.flutter-plugins -.flutter-plugins-dependencies -.pub-cache/ -.pub/ -/build/ - -# Symbolication related -app.*.symbols - -# Obfuscation related -app.*.map.json - -# Android Studio will place build artifacts here -/android/app/debug -/android/app/profile -/android/app/release diff --git a/packages/camera/camera_linux/example/README.md b/packages/camera/camera_linux/example/README.md deleted file mode 100644 index c7c9d957cdfa..000000000000 --- a/packages/camera/camera_linux/example/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# camera_linux_example - -Demonstrates how to use the camera_linux plugin. 
- -## Getting Started - -This project is a starting point for a Flutter application. - -A few resources to get you started if this is your first Flutter project: - -- [Lab: Write your first Flutter app](https://docs.flutter.dev/get-started/codelab) -- [Cookbook: Useful Flutter samples](https://docs.flutter.dev/cookbook) - -For help getting started with Flutter development, view the -[online documentation](https://docs.flutter.dev/), which offers tutorials, -samples, guidance on mobile development, and a full API reference. diff --git a/packages/camera/camera_linux/example/analysis_options.yaml b/packages/camera/camera_linux/example/analysis_options.yaml deleted file mode 100644 index 0d2902135cae..000000000000 --- a/packages/camera/camera_linux/example/analysis_options.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# This file configures the analyzer, which statically analyzes Dart code to -# check for errors, warnings, and lints. -# -# The issues identified by the analyzer are surfaced in the UI of Dart-enabled -# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be -# invoked from the command line by running `flutter analyze`. - -# The following line activates a set of recommended lints for Flutter apps, -# packages, and plugins designed to encourage good coding practices. -include: package:flutter_lints/flutter.yaml - -linter: - # The lint rules applied to this project can be customized in the - # section below to disable rules from the `package:flutter_lints/flutter.yaml` - # included above or to enable additional rules. A list of all available lints - # and their documentation is published at https://dart.dev/lints. - # - # Instead of disabling a lint rule for the entire project in the - # section below, it can also be suppressed for a single line of code - # or a specific dart file by using the `// ignore: name_of_lint` and - # `// ignore_for_file: name_of_lint` syntax on the line or in the file - # producing the lint. 
- rules: - # avoid_print: false # Uncomment to disable the `avoid_print` rule - # prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule - -# Additional information about this file can be found at -# https://dart.dev/guides/language/analysis-options diff --git a/packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart b/packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart deleted file mode 100644 index 0dd45a1c096f..000000000000 --- a/packages/camera/camera_linux/example/integration_test/plugin_integration_test.dart +++ /dev/null @@ -1,25 +0,0 @@ -// This is a basic Flutter integration test. -// -// Since integration tests run in a full Flutter application, they can interact -// with the host side of a plugin implementation, unlike Dart unit tests. -// -// For more information about Flutter integration tests, please see -// https://flutter.dev/to/integration-testing - - -import 'package:flutter_test/flutter_test.dart'; -import 'package:integration_test/integration_test.dart'; - -import 'package:camera_linux/camera_linux.dart'; - -void main() { - IntegrationTestWidgetsFlutterBinding.ensureInitialized(); - - testWidgets('getPlatformVersion test', (WidgetTester tester) async { - final CameraLinux plugin = CameraLinux(); - final String? version = await plugin.getPlatformVersion(); - // The version string depends on the host platform running the test, so - // just assert that some non-empty string is returned. 
- expect(version?.isNotEmpty, true); - }); -} diff --git a/packages/camera/camera_linux/example/lib/main.dart b/packages/camera/camera_linux/example/lib/main.dart deleted file mode 100644 index 1ac60e306dcc..000000000000 --- a/packages/camera/camera_linux/example/lib/main.dart +++ /dev/null @@ -1,63 +0,0 @@ -import 'package:flutter/material.dart'; -import 'dart:async'; - -import 'package:flutter/services.dart'; -import 'package:camera_linux/camera_linux.dart'; - -void main() { - runApp(const MyApp()); -} - -class MyApp extends StatefulWidget { - const MyApp({super.key}); - - @override - State createState() => _MyAppState(); -} - -class _MyAppState extends State { - String _platformVersion = 'Unknown'; - final _cameraLinuxPlugin = CameraLinux(); - - @override - void initState() { - super.initState(); - initPlatformState(); - } - - // Platform messages are asynchronous, so we initialize in an async method. - Future initPlatformState() async { - String platformVersion; - // Platform messages may fail, so we use a try/catch PlatformException. - // We also handle the message potentially returning null. - try { - platformVersion = - await _cameraLinuxPlugin.getPlatformVersion() ?? 'Unknown platform version'; - } on PlatformException { - platformVersion = 'Failed to get platform version.'; - } - - // If the widget was removed from the tree while the asynchronous platform - // message was in flight, we want to discard the reply rather than calling - // setState to update our non-existent appearance. 
- if (!mounted) return; - - setState(() { - _platformVersion = platformVersion; - }); - } - - @override - Widget build(BuildContext context) { - return MaterialApp( - home: Scaffold( - appBar: AppBar( - title: const Text('Plugin example app'), - ), - body: Center( - child: Text('Running on: $_platformVersion\n'), - ), - ), - ); - } -} diff --git a/packages/camera/camera_linux/example/linux/.gitignore b/packages/camera/camera_linux/example/linux/.gitignore deleted file mode 100644 index d3896c98444f..000000000000 --- a/packages/camera/camera_linux/example/linux/.gitignore +++ /dev/null @@ -1 +0,0 @@ -flutter/ephemeral diff --git a/packages/camera/camera_linux/example/linux/CMakeLists.txt b/packages/camera/camera_linux/example/linux/CMakeLists.txt deleted file mode 100644 index ed12fda0cbe1..000000000000 --- a/packages/camera/camera_linux/example/linux/CMakeLists.txt +++ /dev/null @@ -1,147 +0,0 @@ -# Project-level configuration. -cmake_minimum_required(VERSION 3.10) -project(runner LANGUAGES CXX) - -# The name of the executable created for the application. Change this to change -# the on-disk name of your application. -set(BINARY_NAME "camera_linux_example") -# The unique GTK application identifier for this application. See: -# https://wiki.gnome.org/HowDoI/ChooseApplicationID -set(APPLICATION_ID "com.example.camera_linux") - -# Explicitly opt in to modern CMake behaviors to avoid warnings with recent -# versions of CMake. -cmake_policy(SET CMP0063 NEW) - -# Load bundled libraries from the lib/ directory relative to the binary. -set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") - -# Root filesystem for cross-building. 
-if(FLUTTER_TARGET_PLATFORM_SYSROOT) - set(CMAKE_SYSROOT ${FLUTTER_TARGET_PLATFORM_SYSROOT}) - set(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT}) - set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) - set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) - set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) - set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) -endif() - -# Define build configuration options. -if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) - set(CMAKE_BUILD_TYPE "Debug" CACHE - STRING "Flutter build mode" FORCE) - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS - "Debug" "Profile" "Release") -endif() - -# Compilation settings that should be applied to most targets. -# -# Be cautious about adding new options here, as plugins use this function by -# default. In most cases, you should add new options to specific targets instead -# of modifying this function. -function(APPLY_STANDARD_SETTINGS TARGET) - target_compile_features(${TARGET} PUBLIC cxx_std_14) - target_compile_options(${TARGET} PRIVATE -Wall -Werror) - target_compile_options(${TARGET} PRIVATE "$<$>:-O3>") - target_compile_definitions(${TARGET} PRIVATE "$<$>:NDEBUG>") -endfunction() - -# Flutter library and tool build rules. -set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") -add_subdirectory(${FLUTTER_MANAGED_DIR}) - -# System-level dependencies. -find_package(PkgConfig REQUIRED) -pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) - -add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}") - -# Define the application target. To change its name, change BINARY_NAME above, -# not the value here, or `flutter run` will no longer work. -# -# Any new source files that you add to the application should be added here. -add_executable(${BINARY_NAME} - "main.cc" - "my_application.cc" - "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" -) - -# Apply the standard set of build settings. This can be removed for applications -# that need different build settings. 
-apply_standard_settings(${BINARY_NAME}) - -# Add dependency libraries. Add any application-specific dependencies here. -target_link_libraries(${BINARY_NAME} PRIVATE flutter) -target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK) - -# Run the Flutter tool portions of the build. This must not be removed. -add_dependencies(${BINARY_NAME} flutter_assemble) - -# Only the install-generated bundle's copy of the executable will launch -# correctly, since the resources must in the right relative locations. To avoid -# people trying to run the unbundled copy, put it in a subdirectory instead of -# the default top-level location. -set_target_properties(${BINARY_NAME} - PROPERTIES - RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/intermediates_do_not_run" -) - -# Enable the test target. -set(include_camera_linux_tests TRUE) - -# Generated plugin build rules, which manage building the plugins and adding -# them to the application. -include(flutter/generated_plugins.cmake) - - -# === Installation === -# By default, "installing" just makes a relocatable bundle in the build -# directory. -set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") -if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) - set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) -endif() - -# Start with a clean build bundle directory every time. 
-install(CODE " - file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") - " COMPONENT Runtime) - -set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") -set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") - -install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" - COMPONENT Runtime) - -install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" - COMPONENT Runtime) - -install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" - COMPONENT Runtime) - -foreach(bundled_library ${PLUGIN_BUNDLED_LIBRARIES}) - install(FILES "${bundled_library}" - DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" - COMPONENT Runtime) -endforeach(bundled_library) - -# Copy the native assets provided by the build.dart from all packages. -set(NATIVE_ASSETS_DIR "${PROJECT_BUILD_DIR}native_assets/linux/") -install(DIRECTORY "${NATIVE_ASSETS_DIR}" - DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" - COMPONENT Runtime) - -# Fully re-copy the assets directory on each build to avoid having stale files -# from a previous install. -set(FLUTTER_ASSET_DIR_NAME "flutter_assets") -install(CODE " - file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") - " COMPONENT Runtime) -install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" - DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) - -# Install the AOT library on non-Debug builds only. -if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") - install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" - COMPONENT Runtime) -endif() diff --git a/packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt b/packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt deleted file mode 100644 index d5bd01648a96..000000000000 --- a/packages/camera/camera_linux/example/linux/flutter/CMakeLists.txt +++ /dev/null @@ -1,88 +0,0 @@ -# This file controls Flutter-level build steps. It should not be edited. 
-cmake_minimum_required(VERSION 3.10) - -set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") - -# Configuration provided via flutter tool. -include(${EPHEMERAL_DIR}/generated_config.cmake) - -# TODO: Move the rest of this into files in ephemeral. See -# https://github.com/flutter/flutter/issues/57146. - -# Serves the same purpose as list(TRANSFORM ... PREPEND ...), -# which isn't available in 3.10. -function(list_prepend LIST_NAME PREFIX) - set(NEW_LIST "") - foreach(element ${${LIST_NAME}}) - list(APPEND NEW_LIST "${PREFIX}${element}") - endforeach(element) - set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) -endfunction() - -# === Flutter Library === -# System-level dependencies. -find_package(PkgConfig REQUIRED) -pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) -pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0) -pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0) - -set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_linux_gtk.so") - -# Published to parent scope for install step. 
-set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) -set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) -set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) -set(AOT_LIBRARY "${PROJECT_DIR}/build/lib/libapp.so" PARENT_SCOPE) - -list(APPEND FLUTTER_LIBRARY_HEADERS - "fl_basic_message_channel.h" - "fl_binary_codec.h" - "fl_binary_messenger.h" - "fl_dart_project.h" - "fl_engine.h" - "fl_json_message_codec.h" - "fl_json_method_codec.h" - "fl_message_codec.h" - "fl_method_call.h" - "fl_method_channel.h" - "fl_method_codec.h" - "fl_method_response.h" - "fl_plugin_registrar.h" - "fl_plugin_registry.h" - "fl_standard_message_codec.h" - "fl_standard_method_codec.h" - "fl_string_codec.h" - "fl_value.h" - "fl_view.h" - "flutter_linux.h" -) -list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/flutter_linux/") -add_library(flutter INTERFACE) -target_include_directories(flutter INTERFACE - "${EPHEMERAL_DIR}" -) -target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") -target_link_libraries(flutter INTERFACE - PkgConfig::GTK - PkgConfig::GLIB - PkgConfig::GIO -) -add_dependencies(flutter flutter_assemble) - -# === Flutter tool backend === -# _phony_ is a non-existent file to force this command to run every time, -# since currently there's no way to get a full input/output list from the -# flutter tool. 
-add_custom_command( - OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS} - ${CMAKE_CURRENT_BINARY_DIR}/_phony_ - COMMAND ${CMAKE_COMMAND} -E env - ${FLUTTER_TOOL_ENVIRONMENT} - "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.sh" - ${FLUTTER_TARGET_PLATFORM} ${CMAKE_BUILD_TYPE} - VERBATIM -) -add_custom_target(flutter_assemble DEPENDS - "${FLUTTER_LIBRARY}" - ${FLUTTER_LIBRARY_HEADERS} -) diff --git a/packages/camera/camera_linux/example/linux/main.cc b/packages/camera/camera_linux/example/linux/main.cc deleted file mode 100644 index e7c5c5437037..000000000000 --- a/packages/camera/camera_linux/example/linux/main.cc +++ /dev/null @@ -1,6 +0,0 @@ -#include "my_application.h" - -int main(int argc, char** argv) { - g_autoptr(MyApplication) app = my_application_new(); - return g_application_run(G_APPLICATION(app), argc, argv); -} diff --git a/packages/camera/camera_linux/example/linux/my_application.cc b/packages/camera/camera_linux/example/linux/my_application.cc deleted file mode 100644 index 535ff33adbc2..000000000000 --- a/packages/camera/camera_linux/example/linux/my_application.cc +++ /dev/null @@ -1,124 +0,0 @@ -#include "my_application.h" - -#include -#ifdef GDK_WINDOWING_X11 -#include -#endif - -#include "flutter/generated_plugin_registrant.h" - -struct _MyApplication { - GtkApplication parent_instance; - char** dart_entrypoint_arguments; -}; - -G_DEFINE_TYPE(MyApplication, my_application, GTK_TYPE_APPLICATION) - -// Implements GApplication::activate. -static void my_application_activate(GApplication* application) { - MyApplication* self = MY_APPLICATION(application); - GtkWindow* window = - GTK_WINDOW(gtk_application_window_new(GTK_APPLICATION(application))); - - // Use a header bar when running in GNOME as this is the common style used - // by applications and is the setup most users will be using (e.g. Ubuntu - // desktop). 
- // If running on X and not using GNOME then just use a traditional title bar - // in case the window manager does more exotic layout, e.g. tiling. - // If running on Wayland assume the header bar will work (may need changing - // if future cases occur). - gboolean use_header_bar = TRUE; -#ifdef GDK_WINDOWING_X11 - GdkScreen* screen = gtk_window_get_screen(window); - if (GDK_IS_X11_SCREEN(screen)) { - const gchar* wm_name = gdk_x11_screen_get_window_manager_name(screen); - if (g_strcmp0(wm_name, "GNOME Shell") != 0) { - use_header_bar = FALSE; - } - } -#endif - if (use_header_bar) { - GtkHeaderBar* header_bar = GTK_HEADER_BAR(gtk_header_bar_new()); - gtk_widget_show(GTK_WIDGET(header_bar)); - gtk_header_bar_set_title(header_bar, "camera_linux_example"); - gtk_header_bar_set_show_close_button(header_bar, TRUE); - gtk_window_set_titlebar(window, GTK_WIDGET(header_bar)); - } else { - gtk_window_set_title(window, "camera_linux_example"); - } - - gtk_window_set_default_size(window, 1280, 720); - gtk_widget_show(GTK_WIDGET(window)); - - g_autoptr(FlDartProject) project = fl_dart_project_new(); - fl_dart_project_set_dart_entrypoint_arguments(project, self->dart_entrypoint_arguments); - - FlView* view = fl_view_new(project); - gtk_widget_show(GTK_WIDGET(view)); - gtk_container_add(GTK_CONTAINER(window), GTK_WIDGET(view)); - - fl_register_plugins(FL_PLUGIN_REGISTRY(view)); - - gtk_widget_grab_focus(GTK_WIDGET(view)); -} - -// Implements GApplication::local_command_line. -static gboolean my_application_local_command_line(GApplication* application, gchar*** arguments, int* exit_status) { - MyApplication* self = MY_APPLICATION(application); - // Strip out the first argument as it is the binary name. 
- self->dart_entrypoint_arguments = g_strdupv(*arguments + 1); - - g_autoptr(GError) error = nullptr; - if (!g_application_register(application, nullptr, &error)) { - g_warning("Failed to register: %s", error->message); - *exit_status = 1; - return TRUE; - } - - g_application_activate(application); - *exit_status = 0; - - return TRUE; -} - -// Implements GApplication::startup. -static void my_application_startup(GApplication* application) { - //MyApplication* self = MY_APPLICATION(object); - - // Perform any actions required at application startup. - - G_APPLICATION_CLASS(my_application_parent_class)->startup(application); -} - -// Implements GApplication::shutdown. -static void my_application_shutdown(GApplication* application) { - //MyApplication* self = MY_APPLICATION(object); - - // Perform any actions required at application shutdown. - - G_APPLICATION_CLASS(my_application_parent_class)->shutdown(application); -} - -// Implements GObject::dispose. -static void my_application_dispose(GObject* object) { - MyApplication* self = MY_APPLICATION(object); - g_clear_pointer(&self->dart_entrypoint_arguments, g_strfreev); - G_OBJECT_CLASS(my_application_parent_class)->dispose(object); -} - -static void my_application_class_init(MyApplicationClass* klass) { - G_APPLICATION_CLASS(klass)->activate = my_application_activate; - G_APPLICATION_CLASS(klass)->local_command_line = my_application_local_command_line; - G_APPLICATION_CLASS(klass)->startup = my_application_startup; - G_APPLICATION_CLASS(klass)->shutdown = my_application_shutdown; - G_OBJECT_CLASS(klass)->dispose = my_application_dispose; -} - -static void my_application_init(MyApplication* self) {} - -MyApplication* my_application_new() { - return MY_APPLICATION(g_object_new(my_application_get_type(), - "application-id", APPLICATION_ID, - "flags", G_APPLICATION_NON_UNIQUE, - nullptr)); -} diff --git a/packages/camera/camera_linux/example/linux/my_application.h 
b/packages/camera/camera_linux/example/linux/my_application.h deleted file mode 100644 index 72271d5e4170..000000000000 --- a/packages/camera/camera_linux/example/linux/my_application.h +++ /dev/null @@ -1,18 +0,0 @@ -#ifndef FLUTTER_MY_APPLICATION_H_ -#define FLUTTER_MY_APPLICATION_H_ - -#include - -G_DECLARE_FINAL_TYPE(MyApplication, my_application, MY, APPLICATION, - GtkApplication) - -/** - * my_application_new: - * - * Creates a new Flutter-based application. - * - * Returns: a new #MyApplication. - */ -MyApplication* my_application_new(); - -#endif // FLUTTER_MY_APPLICATION_H_ diff --git a/packages/camera/camera_linux/example/pubspec.yaml b/packages/camera/camera_linux/example/pubspec.yaml deleted file mode 100644 index 1079904426f4..000000000000 --- a/packages/camera/camera_linux/example/pubspec.yaml +++ /dev/null @@ -1,85 +0,0 @@ -name: camera_linux_example -description: "Demonstrates how to use the camera_linux plugin." -# The following line prevents the package from being accidentally published to -# pub.dev using `flutter pub publish`. This is preferred for private packages. -publish_to: 'none' # Remove this line if you wish to publish to pub.dev - -environment: - sdk: ^3.5.4 - -# Dependencies specify other packages that your package needs in order to work. -# To automatically upgrade your package dependencies to the latest versions -# consider running `flutter pub upgrade --major-versions`. Alternatively, -# dependencies can be manually updated by changing the version numbers below to -# the latest version available on pub.dev. To see which dependencies have newer -# versions available, run `flutter pub outdated`. 
-dependencies: - flutter: - sdk: flutter - - camera_linux: - # When depending on this package from a real application you should use: - # camera_linux: ^x.y.z - # See https://dart.dev/tools/pub/dependencies#version-constraints - # The example app is bundled with the plugin so we use a path dependency on - # the parent directory to use the current plugin's version. - path: ../ - - # The following adds the Cupertino Icons font to your application. - # Use with the CupertinoIcons class for iOS style icons. - cupertino_icons: ^1.0.8 - -dev_dependencies: - integration_test: - sdk: flutter - flutter_test: - sdk: flutter - - # The "flutter_lints" package below contains a set of recommended lints to - # encourage good coding practices. The lint set provided by the package is - # activated in the `analysis_options.yaml` file located at the root of your - # package. See that file for information about deactivating specific lint - # rules and activating additional ones. - flutter_lints: ^4.0.0 - -# For information on the generic Dart part of this file, see the -# following page: https://dart.dev/tools/pub/pubspec - -# The following section is specific to Flutter packages. -flutter: - - # The following line ensures that the Material Icons font is - # included with your application, so that you can use the icons in - # the material Icons class. - uses-material-design: true - - # To add assets to your application, add an assets section, like this: - # assets: - # - images/a_dot_burr.jpeg - # - images/a_dot_ham.jpeg - - # An image asset can refer to one or more resolution-specific "variants", see - # https://flutter.dev/to/resolution-aware-images - - # For details regarding adding assets from package dependencies, see - # https://flutter.dev/to/asset-from-package - - # To add custom fonts to your application, add a fonts section here, - # in this "flutter" section. 
Each entry in this list should have a - # "family" key with the font family name, and a "fonts" key with a - # list giving the asset and other descriptors for the font. For - # example: - # fonts: - # - family: Schyler - # fonts: - # - asset: fonts/Schyler-Regular.ttf - # - asset: fonts/Schyler-Italic.ttf - # style: italic - # - family: Trajan Pro - # fonts: - # - asset: fonts/TrajanPro.ttf - # - asset: fonts/TrajanPro_Bold.ttf - # weight: 700 - # - # For details regarding fonts from package dependencies, - # see https://flutter.dev/to/font-from-package diff --git a/packages/camera/camera_linux/example/test/widget_test.dart b/packages/camera/camera_linux/example/test/widget_test.dart deleted file mode 100644 index 566570b7e9f4..000000000000 --- a/packages/camera/camera_linux/example/test/widget_test.dart +++ /dev/null @@ -1,27 +0,0 @@ -// This is a basic Flutter widget test. -// -// To perform an interaction with a widget in your test, use the WidgetTester -// utility in the flutter_test package. For example, you can send tap and scroll -// gestures. You can also use WidgetTester to find child widgets in the widget -// tree, read text, and verify that the values of widget properties are correct. - -import 'package:flutter/material.dart'; -import 'package:flutter_test/flutter_test.dart'; - -import 'package:camera_linux_example/main.dart'; - -void main() { - testWidgets('Verify Platform version', (WidgetTester tester) async { - // Build our app and trigger a frame. - await tester.pumpWidget(const MyApp()); - - // Verify that platform version is retrieved. 
- expect( - find.byWidgetPredicate( - (Widget widget) => widget is Text && - widget.data!.startsWith('Running on:'), - ), - findsOneWidget, - ); - }); -} diff --git a/packages/camera/camera_linux/lib/camera_linux.dart b/packages/camera/camera_linux/lib/camera_linux.dart index ca7b21fb01e0..bad23a19fa41 100644 --- a/packages/camera/camera_linux/lib/camera_linux.dart +++ b/packages/camera/camera_linux/lib/camera_linux.dart @@ -1,8 +1 @@ - -import 'camera_linux_platform_interface.dart'; - -class CameraLinux { - Future getPlatformVersion() { - return CameraLinuxPlatform.instance.getPlatformVersion(); - } -} +export 'src/linux_camera.dart'; diff --git a/packages/camera/camera_linux/lib/camera_linux_method_channel.dart b/packages/camera/camera_linux/lib/camera_linux_method_channel.dart deleted file mode 100644 index 5332ffd00949..000000000000 --- a/packages/camera/camera_linux/lib/camera_linux_method_channel.dart +++ /dev/null @@ -1,17 +0,0 @@ -import 'package:flutter/foundation.dart'; -import 'package:flutter/services.dart'; - -import 'camera_linux_platform_interface.dart'; - -/// An implementation of [CameraLinuxPlatform] that uses method channels. -class MethodChannelCameraLinux extends CameraLinuxPlatform { - /// The method channel used to interact with the native platform. 
- @visibleForTesting - final methodChannel = const MethodChannel('camera_linux'); - - @override - Future getPlatformVersion() async { - final version = await methodChannel.invokeMethod('getPlatformVersion'); - return version; - } -} diff --git a/packages/camera/camera_linux/lib/camera_linux_platform_interface.dart b/packages/camera/camera_linux/lib/camera_linux_platform_interface.dart deleted file mode 100644 index 6043c9e54a2c..000000000000 --- a/packages/camera/camera_linux/lib/camera_linux_platform_interface.dart +++ /dev/null @@ -1,29 +0,0 @@ -import 'package:plugin_platform_interface/plugin_platform_interface.dart'; - -import 'camera_linux_method_channel.dart'; - -abstract class CameraLinuxPlatform extends PlatformInterface { - /// Constructs a CameraLinuxPlatform. - CameraLinuxPlatform() : super(token: _token); - - static final Object _token = Object(); - - static CameraLinuxPlatform _instance = MethodChannelCameraLinux(); - - /// The default instance of [CameraLinuxPlatform] to use. - /// - /// Defaults to [MethodChannelCameraLinux]. - static CameraLinuxPlatform get instance => _instance; - - /// Platform-specific implementations should set this with their own - /// platform-specific class that extends [CameraLinuxPlatform] when - /// they register themselves. 
- static set instance(CameraLinuxPlatform instance) { - PlatformInterface.verifyToken(instance, _token); - _instance = instance; - } - - Future getPlatformVersion() { - throw UnimplementedError('platformVersion() has not been implemented.'); - } -} diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart new file mode 100644 index 000000000000..80df997a1150 --- /dev/null +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -0,0 +1,26 @@ +import 'package:camera_linux/src/messages.g.dart'; +import 'package:camera_platform_interface/camera_platform_interface.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +class CameraLinux extends CameraPlatform { + final CameraApi _hostApi; + + CameraLinux({@visibleForTesting CameraApi? api}) : _hostApi = api ?? CameraApi(); + + /// Registers this class as the default instance of [CameraPlatform]. + static void registerWith() { + print("registerWith"); + CameraPlatform.instance = CameraLinux(); + } + + @override + Future> availableCameras() async { + try { + print("availableCameras"); + return []; //(await _hostApi.getAvailableCameras()).map(cameraDescriptionFromPlatform).toList(); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } +} diff --git a/packages/camera/camera_linux/lib/src/messages.g.dart b/packages/camera/camera_linux/lib/src/messages.g.dart new file mode 100644 index 000000000000..f9b7672e4c94 --- /dev/null +++ b/packages/camera/camera_linux/lib/src/messages.g.dart @@ -0,0 +1,1234 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// Autogenerated from Pigeon (v22.7.4), do not edit directly. 
+// See also: https://pub.dev/packages/pigeon +// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, unused_shown_name, unnecessary_import, no_leading_underscores_for_local_identifiers + +import 'dart:async'; +import 'dart:typed_data' show Float64List, Int32List, Int64List, Uint8List; + +import 'package:flutter/foundation.dart' show ReadBuffer, WriteBuffer; +import 'package:flutter/services.dart'; + +PlatformException _createConnectionError(String channelName) { + return PlatformException( + code: 'channel-error', + message: 'Unable to establish connection on channel: "$channelName".', + ); +} + +List wrapResponse({Object? result, PlatformException? error, bool empty = false}) { + if (empty) { + return []; + } + if (error == null) { + return [result]; + } + return [error.code, error.message, error.details]; +} + +enum PlatformCameraLensDirection { + /// Front facing camera (a user looking at the screen is seen by the camera). + front, + /// Back facing camera (a user looking at the screen is not seen by the camera). + back, + /// External camera which may not be mounted to the device. + external, +} + +enum PlatformDeviceOrientation { + portraitUp, + landscapeLeft, + portraitDown, + landscapeRight, +} + +enum PlatformExposureMode { + auto, + locked, +} + +enum PlatformFlashMode { + off, + auto, + always, + torch, +} + +enum PlatformFocusMode { + auto, + locked, +} + +/// Pigeon version of ImageFileFormat. 
+enum PlatformImageFileFormat { + jpeg, + heif, +} + +enum PlatformImageFormatGroup { + bgra8888, + yuv420, +} + +enum PlatformResolutionPreset { + low, + medium, + high, + veryHigh, + ultraHigh, + max, +} + +class PlatformSize { + PlatformSize({ + required this.width, + required this.height, + }); + + double width; + + double height; + + Object encode() { + return [ + width, + height, + ]; + } + + static PlatformSize decode(Object result) { + result as List; + return PlatformSize( + width: result[0]! as double, + height: result[1]! as double, + ); + } +} + +class PlatformCameraDescription { + PlatformCameraDescription({ + required this.name, + required this.lensDirection, + }); + + /// The name of the camera device. + String name; + + /// The direction the camera is facing. + PlatformCameraLensDirection lensDirection; + + Object encode() { + return [ + name, + lensDirection, + ]; + } + + static PlatformCameraDescription decode(Object result) { + result as List; + return PlatformCameraDescription( + name: result[0]! as String, + lensDirection: result[1]! as PlatformCameraLensDirection, + ); + } +} + +class PlatformCameraState { + PlatformCameraState({ + required this.previewSize, + required this.exposureMode, + required this.focusMode, + required this.exposurePointSupported, + required this.focusPointSupported, + }); + + /// The size of the preview, in pixels. + PlatformSize previewSize; + + /// The default exposure mode + PlatformExposureMode exposureMode; + + /// The default focus mode + PlatformFocusMode focusMode; + + /// Whether setting exposure points is supported. + bool exposurePointSupported; + + /// Whether setting focus points is supported. + bool focusPointSupported; + + Object encode() { + return [ + previewSize, + exposureMode, + focusMode, + exposurePointSupported, + focusPointSupported, + ]; + } + + static PlatformCameraState decode(Object result) { + result as List; + return PlatformCameraState( + previewSize: result[0]! 
as PlatformSize, + exposureMode: result[1]! as PlatformExposureMode, + focusMode: result[2]! as PlatformFocusMode, + exposurePointSupported: result[3]! as bool, + focusPointSupported: result[4]! as bool, + ); + } +} + +class PlatformMediaSettings { + PlatformMediaSettings({ + required this.resolutionPreset, + this.framesPerSecond, + this.videoBitrate, + this.audioBitrate, + required this.enableAudio, + }); + + PlatformResolutionPreset resolutionPreset; + + int? framesPerSecond; + + int? videoBitrate; + + int? audioBitrate; + + bool enableAudio; + + Object encode() { + return [ + resolutionPreset, + framesPerSecond, + videoBitrate, + audioBitrate, + enableAudio, + ]; + } + + static PlatformMediaSettings decode(Object result) { + result as List; + return PlatformMediaSettings( + resolutionPreset: result[0]! as PlatformResolutionPreset, + framesPerSecond: result[1] as int?, + videoBitrate: result[2] as int?, + audioBitrate: result[3] as int?, + enableAudio: result[4]! as bool, + ); + } +} + +class PlatformPoint { + PlatformPoint({ + required this.x, + required this.y, + }); + + double x; + + double y; + + Object encode() { + return [ + x, + y, + ]; + } + + static PlatformPoint decode(Object result) { + result as List; + return PlatformPoint( + x: result[0]! as double, + y: result[1]! as double, + ); + } +} + + +class _PigeonCodec extends StandardMessageCodec { + const _PigeonCodec(); + @override + void writeValue(WriteBuffer buffer, Object? 
value) { + if (value is int) { + buffer.putUint8(4); + buffer.putInt64(value); + } else if (value is PlatformCameraLensDirection) { + buffer.putUint8(129); + writeValue(buffer, value.index); + } else if (value is PlatformDeviceOrientation) { + buffer.putUint8(130); + writeValue(buffer, value.index); + } else if (value is PlatformExposureMode) { + buffer.putUint8(131); + writeValue(buffer, value.index); + } else if (value is PlatformFlashMode) { + buffer.putUint8(132); + writeValue(buffer, value.index); + } else if (value is PlatformFocusMode) { + buffer.putUint8(133); + writeValue(buffer, value.index); + } else if (value is PlatformImageFileFormat) { + buffer.putUint8(134); + writeValue(buffer, value.index); + } else if (value is PlatformImageFormatGroup) { + buffer.putUint8(135); + writeValue(buffer, value.index); + } else if (value is PlatformResolutionPreset) { + buffer.putUint8(136); + writeValue(buffer, value.index); + } else if (value is PlatformSize) { + buffer.putUint8(137); + writeValue(buffer, value.encode()); + } else if (value is PlatformCameraDescription) { + buffer.putUint8(138); + writeValue(buffer, value.encode()); + } else if (value is PlatformCameraState) { + buffer.putUint8(139); + writeValue(buffer, value.encode()); + } else if (value is PlatformMediaSettings) { + buffer.putUint8(140); + writeValue(buffer, value.encode()); + } else if (value is PlatformPoint) { + buffer.putUint8(141); + writeValue(buffer, value.encode()); + } else { + super.writeValue(buffer, value); + } + } + + @override + Object? readValueOfType(int type, ReadBuffer buffer) { + switch (type) { + case 129: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformCameraLensDirection.values[value]; + case 130: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformDeviceOrientation.values[value]; + case 131: + final int? value = readValue(buffer) as int?; + return value == null ? 
null : PlatformExposureMode.values[value]; + case 132: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformFlashMode.values[value]; + case 133: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformFocusMode.values[value]; + case 134: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformImageFileFormat.values[value]; + case 135: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformImageFormatGroup.values[value]; + case 136: + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformResolutionPreset.values[value]; + case 137: + return PlatformSize.decode(readValue(buffer)!); + case 138: + return PlatformCameraDescription.decode(readValue(buffer)!); + case 139: + return PlatformCameraState.decode(readValue(buffer)!); + case 140: + return PlatformMediaSettings.decode(readValue(buffer)!); + case 141: + return PlatformPoint.decode(readValue(buffer)!); + default: + return super.readValueOfType(type, buffer); + } + } +} + +class CameraApi { + /// Constructor for [CameraApi]. The [binaryMessenger] named argument is + /// available for dependency injection. If it is left null, the default + /// BinaryMessenger will be used which routes to the host platform. + CameraApi({BinaryMessenger? binaryMessenger, String messageChannelSuffix = ''}) + : pigeonVar_binaryMessenger = binaryMessenger, + pigeonVar_messageChannelSuffix = messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : ''; + final BinaryMessenger? pigeonVar_binaryMessenger; + + static const MessageCodec pigeonChannelCodec = _PigeonCodec(); + + final String pigeonVar_messageChannelSuffix; + + /// Returns the list of available cameras. 
+ Future> getAvailableCameras() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCameras$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as List?)!.cast(); + } + } + + /// Create a new camera with the given settings, and returns its ID. + Future create(String cameraName, PlatformMediaSettings settings) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.create$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraName, settings]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as int?)!; + } + } + + /// Initializes the camera with the given ID. + Future initialize(int cameraId, PlatformImageFormatGroup imageFormat) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.initialize$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, imageFormat]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Begins streaming frames from the camera. + Future startImageStream() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.startImageStream$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Stops streaming frames from the camera. + Future stopImageStream() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Called by the Dart side of the plugin when it has received the last image + /// frame sent. + /// + /// This is used to throttle sending frames across the channel. + Future receivedImageStreamData() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Indicates that the given camera is no longer being used on the Dart side, + /// and any associated resources can be cleaned up. 
+ Future dispose(int cameraId) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.dispose$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Locks the camera capture to the current device orientation. + Future lockCaptureOrientation(PlatformDeviceOrientation orientation) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.lockCaptureOrientation$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([orientation]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Unlocks camera capture orientation, allowing it to automatically adapt to + /// device orientation. 
+ Future unlockCaptureOrientation() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.unlockCaptureOrientation$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Takes a picture with the current settings, and returns the path to the + /// resulting file. + Future takePicture() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.takePicture$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as String?)!; + } + } + + /// Does any preprocessing necessary before beginning to record video. 
+ Future prepareForVideoRecording() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.prepareForVideoRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Begins recording video, optionally enabling streaming to Dart at the same + /// time. + Future startVideoRecording(bool enableStream) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([enableStream]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Stops recording video, and returns the path to the resulting file. 
+ Future stopVideoRecording() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as String?)!; + } + } + + /// Pauses video recording. + Future pauseVideoRecording() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.pauseVideoRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Resumes a previously paused video recording. 
+ Future resumeVideoRecording() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.resumeVideoRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Switches the camera to the given flash mode. + Future setFlashMode(PlatformFlashMode mode) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setFlashMode$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([mode]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Switches the camera to the given exposure mode. + Future setExposureMode(PlatformExposureMode mode) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send([mode]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Anchors auto-exposure to the given point in (0,1) coordinate space. + /// + /// A null value resets to the default exposure point. + Future setExposurePoint(PlatformPoint? point) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setExposurePoint$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([point]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + Future setLensPosition(double position) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setLensPosition$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send([position]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Returns the minimum exposure offset supported by the camera. + Future getMinExposureOffset() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMinExposureOffset$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as double?)!; + } + } + + /// Returns the maximum exposure offset supported by the camera. + Future getMaxExposureOffset() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMaxExposureOffset$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as double?)!; + } + } + + /// Sets the exposure offset manually to the given value. + Future setExposureOffset(double offset) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setExposureOffset$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([offset]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Switches the camera to the given focus mode. + Future setFocusMode(PlatformFocusMode mode) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send([mode]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Anchors auto-focus to the given point in (0,1) coordinate space. + /// + /// A null value resets to the default focus point. + Future setFocusPoint(PlatformPoint? point) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setFocusPoint$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([point]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Returns the minimum zoom level supported by the camera. + Future getMinZoomLevel() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMinZoomLevel$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as double?)!; + } + } + + /// Returns the maximum zoom level supported by the camera. + Future getMaxZoomLevel() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMaxZoomLevel$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as double?)!; + } + } + + /// Sets the zoom factor. + Future setZoomLevel(double zoom) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setZoomLevel$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? 
pigeonVar_replyList = + await pigeonVar_channel.send([zoom]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Pauses streaming of preview frames. + Future pausePreview() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.pausePreview$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Resumes a previously paused preview stream. + Future resumePreview() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.resumePreview$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send(null) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Changes the camera used while recording video. + /// + /// This should only be called while video recording is active. + Future updateDescriptionWhileRecording(String cameraName) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.updateDescriptionWhileRecording$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraName]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } + + /// Sets the file format used for taking pictures. + Future setImageFileFormat(PlatformImageFileFormat format) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([format]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } +} + +/// Handler for native callbacks that are not tied to a specific camera ID. 
+abstract class CameraGlobalEventApi { + static const MessageCodec pigeonChannelCodec = _PigeonCodec(); + + /// Called when the device's physical orientation changes. + void deviceOrientationChanged(PlatformDeviceOrientation orientation); + + static void setUp(CameraGlobalEventApi? api, {BinaryMessenger? binaryMessenger, String messageChannelSuffix = '',}) { + messageChannelSuffix = messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : ''; + { + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + 'dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged$messageChannelSuffix', pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + pigeonVar_channel.setMessageHandler(null); + } else { + pigeonVar_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged was null.'); + final List args = (message as List?)!; + final PlatformDeviceOrientation? arg_orientation = (args[0] as PlatformDeviceOrientation?); + assert(arg_orientation != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged was null, expected non-null PlatformDeviceOrientation.'); + try { + api.deviceOrientationChanged(arg_orientation!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + } +} + +/// Handler for native callbacks that are tied to a specific camera ID. +/// +/// This is intended to be initialized with the camera ID as a suffix. +abstract class CameraEventApi { + static const MessageCodec pigeonChannelCodec = _PigeonCodec(); + + /// Called when the camera is inialitized for use. + void initialized(PlatformCameraState initialState); + + /// Called when an error occurs in the camera. 
+ /// + /// This should be used for errors that occur outside of the context of + /// handling a specific HostApi call, such as during streaming. + void error(String message); + + static void setUp(CameraEventApi? api, {BinaryMessenger? binaryMessenger, String messageChannelSuffix = '',}) { + messageChannelSuffix = messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : ''; + { + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + 'dev.flutter.pigeon.camera_linux.CameraEventApi.initialized$messageChannelSuffix', pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + pigeonVar_channel.setMessageHandler(null); + } else { + pigeonVar_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.initialized was null.'); + final List args = (message as List?)!; + final PlatformCameraState? arg_initialState = (args[0] as PlatformCameraState?); + assert(arg_initialState != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.initialized was null, expected non-null PlatformCameraState.'); + try { + api.initialized(arg_initialState!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + { + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + 'dev.flutter.pigeon.camera_linux.CameraEventApi.error$messageChannelSuffix', pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + pigeonVar_channel.setMessageHandler(null); + } else { + pigeonVar_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.error was null.'); + final List args = (message as List?)!; + final String? 
arg_message = (args[0] as String?); + assert(arg_message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.error was null, expected non-null String.'); + try { + api.error(arg_message!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + } +} diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index 80689ad7a9de..30858ec47992 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -13,7 +13,8 @@ set(PLUGIN_NAME "camera_linux_plugin") # Any new source files that you add to the plugin should be added here. list(APPEND PLUGIN_SOURCES - "camera_linux_plugin.cc" + "camera_plugin.cpp" + "messages.g.cc" ) # Define the plugin library target. Its name must not be changed (see comment @@ -37,7 +38,9 @@ target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) # Source include directories and library dependencies. Add any plugin-specific # dependencies here. target_include_directories(${PLUGIN_NAME} INTERFACE - "${CMAKE_CURRENT_SOURCE_DIR}/include") + "${CMAKE_CURRENT_SOURCE_DIR}/include" + "${CMAKE_CURRENT_SOURCE_DIR}" +) target_link_libraries(${PLUGIN_NAME} PRIVATE flutter) target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::GTK) @@ -48,47 +51,3 @@ set(camera_linux_bundled_libraries "" PARENT_SCOPE ) - -# === Tests === -# These unit tests can be run from a terminal after building the example. - -# Only enable test builds when building the example (which sets this variable) -# so that plugin clients aren't building the tests. 
-if (${include_${PROJECT_NAME}_tests}) -if(${CMAKE_VERSION} VERSION_LESS "3.11.0") -message("Unit tests require CMake 3.11.0 or later") -else() -set(TEST_RUNNER "${PROJECT_NAME}_test") -enable_testing() - -# Add the Google Test dependency. -include(FetchContent) -FetchContent_Declare( - googletest - URL https://github.com/google/googletest/archive/release-1.11.0.zip -) -# Prevent overriding the parent project's compiler/linker settings -set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) -# Disable install commands for gtest so it doesn't end up in the bundle. -set(INSTALL_GTEST OFF CACHE BOOL "Disable installation of googletest" FORCE) - -FetchContent_MakeAvailable(googletest) - -# The plugin's exported API is not very useful for unit testing, so build the -# sources directly into the test binary rather than using the shared library. -add_executable(${TEST_RUNNER} - test/camera_linux_plugin_test.cc - ${PLUGIN_SOURCES} -) -apply_standard_settings(${TEST_RUNNER}) -target_include_directories(${TEST_RUNNER} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}") -target_link_libraries(${TEST_RUNNER} PRIVATE flutter) -target_link_libraries(${TEST_RUNNER} PRIVATE PkgConfig::GTK) -target_link_libraries(${TEST_RUNNER} PRIVATE gtest_main gmock) - -# Enable automatic test discovery. 
-include(GoogleTest) -gtest_discover_tests(${TEST_RUNNER}) - -endif() # CMake version check -endif() # include_${PROJECT_NAME}_tests \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/camera_linux_plugin.cc b/packages/camera/camera_linux/linux/camera_linux_plugin.cc deleted file mode 100644 index d3599d644965..000000000000 --- a/packages/camera/camera_linux/linux/camera_linux_plugin.cc +++ /dev/null @@ -1,76 +0,0 @@ -#include "include/camera_linux/camera_linux_plugin.h" - -#include -#include -#include - -#include - -#include "camera_linux_plugin_private.h" - -#define CAMERA_LINUX_PLUGIN(obj) \ - (G_TYPE_CHECK_INSTANCE_CAST((obj), camera_linux_plugin_get_type(), \ - CameraLinuxPlugin)) - -struct _CameraLinuxPlugin { - GObject parent_instance; -}; - -G_DEFINE_TYPE(CameraLinuxPlugin, camera_linux_plugin, g_object_get_type()) - -// Called when a method call is received from Flutter. -static void camera_linux_plugin_handle_method_call( - CameraLinuxPlugin* self, - FlMethodCall* method_call) { - g_autoptr(FlMethodResponse) response = nullptr; - - const gchar* method = fl_method_call_get_name(method_call); - - if (strcmp(method, "getPlatformVersion") == 0) { - response = get_platform_version(); - } else { - response = FL_METHOD_RESPONSE(fl_method_not_implemented_response_new()); - } - - fl_method_call_respond(method_call, response, nullptr); -} - -FlMethodResponse* get_platform_version() { - struct utsname uname_data = {}; - uname(&uname_data); - g_autofree gchar *version = g_strdup_printf("Linux %s", uname_data.version); - g_autoptr(FlValue) result = fl_value_new_string(version); - return FL_METHOD_RESPONSE(fl_method_success_response_new(result)); -} - -static void camera_linux_plugin_dispose(GObject* object) { - G_OBJECT_CLASS(camera_linux_plugin_parent_class)->dispose(object); -} - -static void camera_linux_plugin_class_init(CameraLinuxPluginClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_plugin_dispose; -} - -static void 
camera_linux_plugin_init(CameraLinuxPlugin* self) {} - -static void method_call_cb(FlMethodChannel* channel, FlMethodCall* method_call, - gpointer user_data) { - CameraLinuxPlugin* plugin = CAMERA_LINUX_PLUGIN(user_data); - camera_linux_plugin_handle_method_call(plugin, method_call); -} - -void camera_linux_plugin_register_with_registrar(FlPluginRegistrar* registrar) { - CameraLinuxPlugin* plugin = CAMERA_LINUX_PLUGIN( - g_object_new(camera_linux_plugin_get_type(), nullptr)); - - g_autoptr(FlStandardMethodCodec) codec = fl_standard_method_codec_new(); - g_autoptr(FlMethodChannel) channel = - fl_method_channel_new(fl_plugin_registrar_get_messenger(registrar), - "camera_linux", - FL_METHOD_CODEC(codec)); - fl_method_channel_set_method_call_handler(channel, method_call_cb, - g_object_ref(plugin), - g_object_unref); - - g_object_unref(plugin); -} diff --git a/packages/camera/camera_linux/linux/camera_linux_plugin_private.h b/packages/camera/camera_linux/linux/camera_linux_plugin_private.h deleted file mode 100644 index 1e590da5dbd4..000000000000 --- a/packages/camera/camera_linux/linux/camera_linux_plugin_private.h +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#include "include/camera_linux/camera_linux_plugin.h" - -// This file exposes some plugin internals for unit testing. See -// https://github.com/flutter/flutter/issues/88724 for current limitations -// in the unit-testable API. - -// Handles the getPlatformVersion method call. 
-FlMethodResponse *get_platform_version(); diff --git a/packages/camera/camera_linux/linux/camera_plugin.cpp b/packages/camera/camera_linux/linux/camera_plugin.cpp new file mode 100644 index 000000000000..3973c0d044cd --- /dev/null +++ b/packages/camera/camera_linux/linux/camera_plugin.cpp @@ -0,0 +1,3 @@ +#include "include/camera_linux/camera_plugin.h" + +void camera_plugin_register_with_registrar(FlPluginRegistrar* registrar) {} diff --git a/packages/camera/camera_linux/linux/include/camera_linux/camera_linux_plugin.h b/packages/camera/camera_linux/linux/include/camera_linux/camera_plugin.h similarity index 59% rename from packages/camera/camera_linux/linux/include/camera_linux/camera_linux_plugin.h rename to packages/camera/camera_linux/linux/include/camera_linux/camera_plugin.h index 26c0ea39d8e0..8352ed3f3fa1 100644 --- a/packages/camera/camera_linux/linux/include/camera_linux/camera_linux_plugin.h +++ b/packages/camera/camera_linux/linux/include/camera_linux/camera_plugin.h @@ -11,14 +11,7 @@ G_BEGIN_DECLS #define FLUTTER_PLUGIN_EXPORT #endif -typedef struct _CameraLinuxPlugin CameraLinuxPlugin; -typedef struct { - GObjectClass parent_class; -} CameraLinuxPluginClass; - -FLUTTER_PLUGIN_EXPORT GType camera_linux_plugin_get_type(); - -FLUTTER_PLUGIN_EXPORT void camera_linux_plugin_register_with_registrar( +FLUTTER_PLUGIN_EXPORT void camera_plugin_register_with_registrar( FlPluginRegistrar* registrar); G_END_DECLS diff --git a/packages/camera/camera_linux/linux/messages.g.cc b/packages/camera/camera_linux/linux/messages.g.cc new file mode 100644 index 000000000000..ae838c8e117f --- /dev/null +++ b/packages/camera/camera_linux/linux/messages.g.cc @@ -0,0 +1,3286 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// Autogenerated from Pigeon (v22.7.4), do not edit directly. 
+// See also: https://pub.dev/packages/pigeon + +#include "messages.g.h" + +struct _CameraLinuxPlatformSize { + GObject parent_instance; + + double width; + double height; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformSize, camera_linux_platform_size, G_TYPE_OBJECT) + +static void camera_linux_platform_size_dispose(GObject* object) { + G_OBJECT_CLASS(camera_linux_platform_size_parent_class)->dispose(object); +} + +static void camera_linux_platform_size_init(CameraLinuxPlatformSize* self) { +} + +static void camera_linux_platform_size_class_init(CameraLinuxPlatformSizeClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_size_dispose; +} + +CameraLinuxPlatformSize* camera_linux_platform_size_new(double width, double height) { + CameraLinuxPlatformSize* self = CAMERA_LINUX_PLATFORM_SIZE(g_object_new(camera_linux_platform_size_get_type(), nullptr)); + self->width = width; + self->height = height; + return self; +} + +double camera_linux_platform_size_get_width(CameraLinuxPlatformSize* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_SIZE(self), 0.0); + return self->width; +} + +double camera_linux_platform_size_get_height(CameraLinuxPlatformSize* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_SIZE(self), 0.0); + return self->height; +} + +static FlValue* camera_linux_platform_size_to_list(CameraLinuxPlatformSize* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_float(self->width)); + fl_value_append_take(values, fl_value_new_float(self->height)); + return values; +} + +static CameraLinuxPlatformSize* camera_linux_platform_size_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + double width = fl_value_get_float(value0); + FlValue* value1 = fl_value_get_list_value(values, 1); + double height = fl_value_get_float(value1); + return camera_linux_platform_size_new(width, height); +} + +struct _CameraLinuxPlatformCameraDescription { + GObject parent_instance; + + 
gchar* name; + CameraLinuxPlatformCameraLensDirection lens_direction; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformCameraDescription, camera_linux_platform_camera_description, G_TYPE_OBJECT) + +static void camera_linux_platform_camera_description_dispose(GObject* object) { + CameraLinuxPlatformCameraDescription* self = CAMERA_LINUX_PLATFORM_CAMERA_DESCRIPTION(object); + g_clear_pointer(&self->name, g_free); + G_OBJECT_CLASS(camera_linux_platform_camera_description_parent_class)->dispose(object); +} + +static void camera_linux_platform_camera_description_init(CameraLinuxPlatformCameraDescription* self) { +} + +static void camera_linux_platform_camera_description_class_init(CameraLinuxPlatformCameraDescriptionClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_camera_description_dispose; +} + +CameraLinuxPlatformCameraDescription* camera_linux_platform_camera_description_new(const gchar* name, CameraLinuxPlatformCameraLensDirection lens_direction) { + CameraLinuxPlatformCameraDescription* self = CAMERA_LINUX_PLATFORM_CAMERA_DESCRIPTION(g_object_new(camera_linux_platform_camera_description_get_type(), nullptr)); + self->name = g_strdup(name); + self->lens_direction = lens_direction; + return self; +} + +const gchar* camera_linux_platform_camera_description_get_name(CameraLinuxPlatformCameraDescription* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_DESCRIPTION(self), nullptr); + return self->name; +} + +CameraLinuxPlatformCameraLensDirection camera_linux_platform_camera_description_get_lens_direction(CameraLinuxPlatformCameraDescription* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_DESCRIPTION(self), static_cast(0)); + return self->lens_direction; +} + +static FlValue* camera_linux_platform_camera_description_to_list(CameraLinuxPlatformCameraDescription* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_string(self->name)); + fl_value_append_take(values, 
fl_value_new_custom(129, fl_value_new_int(self->lens_direction), (GDestroyNotify)fl_value_unref)); + return values; +} + +static CameraLinuxPlatformCameraDescription* camera_linux_platform_camera_description_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + const gchar* name = fl_value_get_string(value0); + FlValue* value1 = fl_value_get_list_value(values, 1); + CameraLinuxPlatformCameraLensDirection lens_direction = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + return camera_linux_platform_camera_description_new(name, lens_direction); +} + +struct _CameraLinuxPlatformCameraState { + GObject parent_instance; + + CameraLinuxPlatformSize* preview_size; + CameraLinuxPlatformExposureMode exposure_mode; + CameraLinuxPlatformFocusMode focus_mode; + gboolean exposure_point_supported; + gboolean focus_point_supported; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformCameraState, camera_linux_platform_camera_state, G_TYPE_OBJECT) + +static void camera_linux_platform_camera_state_dispose(GObject* object) { + CameraLinuxPlatformCameraState* self = CAMERA_LINUX_PLATFORM_CAMERA_STATE(object); + g_clear_object(&self->preview_size); + G_OBJECT_CLASS(camera_linux_platform_camera_state_parent_class)->dispose(object); +} + +static void camera_linux_platform_camera_state_init(CameraLinuxPlatformCameraState* self) { +} + +static void camera_linux_platform_camera_state_class_init(CameraLinuxPlatformCameraStateClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_camera_state_dispose; +} + +CameraLinuxPlatformCameraState* camera_linux_platform_camera_state_new(CameraLinuxPlatformSize* preview_size, CameraLinuxPlatformExposureMode exposure_mode, CameraLinuxPlatformFocusMode focus_mode, gboolean exposure_point_supported, gboolean focus_point_supported) { + CameraLinuxPlatformCameraState* self = CAMERA_LINUX_PLATFORM_CAMERA_STATE(g_object_new(camera_linux_platform_camera_state_get_type(), 
nullptr)); + self->preview_size = CAMERA_LINUX_PLATFORM_SIZE(g_object_ref(preview_size)); + self->exposure_mode = exposure_mode; + self->focus_mode = focus_mode; + self->exposure_point_supported = exposure_point_supported; + self->focus_point_supported = focus_point_supported; + return self; +} + +CameraLinuxPlatformSize* camera_linux_platform_camera_state_get_preview_size(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), nullptr); + return self->preview_size; +} + +CameraLinuxPlatformExposureMode camera_linux_platform_camera_state_get_exposure_mode(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), static_cast(0)); + return self->exposure_mode; +} + +CameraLinuxPlatformFocusMode camera_linux_platform_camera_state_get_focus_mode(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), static_cast(0)); + return self->focus_mode; +} + +gboolean camera_linux_platform_camera_state_get_exposure_point_supported(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), FALSE); + return self->exposure_point_supported; +} + +gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinuxPlatformCameraState* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_STATE(self), FALSE); + return self->focus_point_supported; +} + +static FlValue* camera_linux_platform_camera_state_to_list(CameraLinuxPlatformCameraState* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_custom_object(137, G_OBJECT(self->preview_size))); + fl_value_append_take(values, fl_value_new_custom(131, fl_value_new_int(self->exposure_mode), (GDestroyNotify)fl_value_unref)); + fl_value_append_take(values, fl_value_new_custom(133, fl_value_new_int(self->focus_mode), (GDestroyNotify)fl_value_unref)); + fl_value_append_take(values, 
fl_value_new_bool(self->exposure_point_supported)); + fl_value_append_take(values, fl_value_new_bool(self->focus_point_supported)); + return values; +} + +static CameraLinuxPlatformCameraState* camera_linux_platform_camera_state_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + CameraLinuxPlatformSize* preview_size = CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value0)); + FlValue* value1 = fl_value_get_list_value(values, 1); + CameraLinuxPlatformExposureMode exposure_mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + FlValue* value2 = fl_value_get_list_value(values, 2); + CameraLinuxPlatformFocusMode focus_mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value2))))); + FlValue* value3 = fl_value_get_list_value(values, 3); + gboolean exposure_point_supported = fl_value_get_bool(value3); + FlValue* value4 = fl_value_get_list_value(values, 4); + gboolean focus_point_supported = fl_value_get_bool(value4); + return camera_linux_platform_camera_state_new(preview_size, exposure_mode, focus_mode, exposure_point_supported, focus_point_supported); +} + +struct _CameraLinuxPlatformMediaSettings { + GObject parent_instance; + + CameraLinuxPlatformResolutionPreset resolution_preset; + int64_t* frames_per_second; + int64_t* video_bitrate; + int64_t* audio_bitrate; + gboolean enable_audio; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformMediaSettings, camera_linux_platform_media_settings, G_TYPE_OBJECT) + +static void camera_linux_platform_media_settings_dispose(GObject* object) { + CameraLinuxPlatformMediaSettings* self = CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(object); + g_clear_pointer(&self->frames_per_second, g_free); + g_clear_pointer(&self->video_bitrate, g_free); + g_clear_pointer(&self->audio_bitrate, g_free); + G_OBJECT_CLASS(camera_linux_platform_media_settings_parent_class)->dispose(object); +} + +static void 
camera_linux_platform_media_settings_init(CameraLinuxPlatformMediaSettings* self) { +} + +static void camera_linux_platform_media_settings_class_init(CameraLinuxPlatformMediaSettingsClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_media_settings_dispose; +} + +CameraLinuxPlatformMediaSettings* camera_linux_platform_media_settings_new(CameraLinuxPlatformResolutionPreset resolution_preset, int64_t* frames_per_second, int64_t* video_bitrate, int64_t* audio_bitrate, gboolean enable_audio) { + CameraLinuxPlatformMediaSettings* self = CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(g_object_new(camera_linux_platform_media_settings_get_type(), nullptr)); + self->resolution_preset = resolution_preset; + if (frames_per_second != nullptr) { + self->frames_per_second = static_cast(malloc(sizeof(int64_t))); + *self->frames_per_second = *frames_per_second; + } + else { + self->frames_per_second = nullptr; + } + if (video_bitrate != nullptr) { + self->video_bitrate = static_cast(malloc(sizeof(int64_t))); + *self->video_bitrate = *video_bitrate; + } + else { + self->video_bitrate = nullptr; + } + if (audio_bitrate != nullptr) { + self->audio_bitrate = static_cast(malloc(sizeof(int64_t))); + *self->audio_bitrate = *audio_bitrate; + } + else { + self->audio_bitrate = nullptr; + } + self->enable_audio = enable_audio; + return self; +} + +CameraLinuxPlatformResolutionPreset camera_linux_platform_media_settings_get_resolution_preset(CameraLinuxPlatformMediaSettings* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), static_cast(0)); + return self->resolution_preset; +} + +int64_t* camera_linux_platform_media_settings_get_frames_per_second(CameraLinuxPlatformMediaSettings* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), nullptr); + return self->frames_per_second; +} + +int64_t* camera_linux_platform_media_settings_get_video_bitrate(CameraLinuxPlatformMediaSettings* self) { + 
g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), nullptr); + return self->video_bitrate; +} + +int64_t* camera_linux_platform_media_settings_get_audio_bitrate(CameraLinuxPlatformMediaSettings* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), nullptr); + return self->audio_bitrate; +} + +gboolean camera_linux_platform_media_settings_get_enable_audio(CameraLinuxPlatformMediaSettings* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), FALSE); + return self->enable_audio; +} + +static FlValue* camera_linux_platform_media_settings_to_list(CameraLinuxPlatformMediaSettings* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_custom(136, fl_value_new_int(self->resolution_preset), (GDestroyNotify)fl_value_unref)); + fl_value_append_take(values, self->frames_per_second != nullptr ? fl_value_new_int(*self->frames_per_second) : fl_value_new_null()); + fl_value_append_take(values, self->video_bitrate != nullptr ? fl_value_new_int(*self->video_bitrate) : fl_value_new_null()); + fl_value_append_take(values, self->audio_bitrate != nullptr ? 
fl_value_new_int(*self->audio_bitrate) : fl_value_new_null()); + fl_value_append_take(values, fl_value_new_bool(self->enable_audio)); + return values; +} + +static CameraLinuxPlatformMediaSettings* camera_linux_platform_media_settings_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + CameraLinuxPlatformResolutionPreset resolution_preset = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); + FlValue* value1 = fl_value_get_list_value(values, 1); + int64_t* frames_per_second = nullptr; + int64_t frames_per_second_value; + if (fl_value_get_type(value1) != FL_VALUE_TYPE_NULL) { + frames_per_second_value = fl_value_get_int(value1); + frames_per_second = &frames_per_second_value; + } + FlValue* value2 = fl_value_get_list_value(values, 2); + int64_t* video_bitrate = nullptr; + int64_t video_bitrate_value; + if (fl_value_get_type(value2) != FL_VALUE_TYPE_NULL) { + video_bitrate_value = fl_value_get_int(value2); + video_bitrate = &video_bitrate_value; + } + FlValue* value3 = fl_value_get_list_value(values, 3); + int64_t* audio_bitrate = nullptr; + int64_t audio_bitrate_value; + if (fl_value_get_type(value3) != FL_VALUE_TYPE_NULL) { + audio_bitrate_value = fl_value_get_int(value3); + audio_bitrate = &audio_bitrate_value; + } + FlValue* value4 = fl_value_get_list_value(values, 4); + gboolean enable_audio = fl_value_get_bool(value4); + return camera_linux_platform_media_settings_new(resolution_preset, frames_per_second, video_bitrate, audio_bitrate, enable_audio); +} + +struct _CameraLinuxPlatformPoint { + GObject parent_instance; + + double x; + double y; +}; + +G_DEFINE_TYPE(CameraLinuxPlatformPoint, camera_linux_platform_point, G_TYPE_OBJECT) + +static void camera_linux_platform_point_dispose(GObject* object) { + G_OBJECT_CLASS(camera_linux_platform_point_parent_class)->dispose(object); +} + +static void camera_linux_platform_point_init(CameraLinuxPlatformPoint* self) { +} + +static void 
camera_linux_platform_point_class_init(CameraLinuxPlatformPointClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_point_dispose; +} + +CameraLinuxPlatformPoint* camera_linux_platform_point_new(double x, double y) { + CameraLinuxPlatformPoint* self = CAMERA_LINUX_PLATFORM_POINT(g_object_new(camera_linux_platform_point_get_type(), nullptr)); + self->x = x; + self->y = y; + return self; +} + +double camera_linux_platform_point_get_x(CameraLinuxPlatformPoint* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_POINT(self), 0.0); + return self->x; +} + +double camera_linux_platform_point_get_y(CameraLinuxPlatformPoint* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_POINT(self), 0.0); + return self->y; +} + +static FlValue* camera_linux_platform_point_to_list(CameraLinuxPlatformPoint* self) { + FlValue* values = fl_value_new_list(); + fl_value_append_take(values, fl_value_new_float(self->x)); + fl_value_append_take(values, fl_value_new_float(self->y)); + return values; +} + +static CameraLinuxPlatformPoint* camera_linux_platform_point_new_from_list(FlValue* values) { + FlValue* value0 = fl_value_get_list_value(values, 0); + double x = fl_value_get_float(value0); + FlValue* value1 = fl_value_get_list_value(values, 1); + double y = fl_value_get_float(value1); + return camera_linux_platform_point_new(x, y); +} + +struct _CameraLinuxMessageCodec { + FlStandardMessageCodec parent_instance; + +}; + +G_DEFINE_TYPE(CameraLinuxMessageCodec, camera_linux_message_codec, fl_standard_message_codec_get_type()) + +static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_lens_direction(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 129; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean 
camera_linux_message_codec_write_camera_linux_platform_device_orientation(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 130; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_exposure_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 131; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_flash_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 132; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_focus_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 133; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_image_file_format(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 134; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 135; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean 
camera_linux_message_codec_write_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + uint8_t type = 136; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_size(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformSize* value, GError** error) { + uint8_t type = 137; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_size_to_list(value); + return fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_description(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformCameraDescription* value, GError** error) { + uint8_t type = 138; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_camera_description_to_list(value); + return fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformCameraState* value, GError** error) { + uint8_t type = 139; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_camera_state_to_list(value); + return fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_media_settings(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformMediaSettings* value, GError** error) { + uint8_t type = 140; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_media_settings_to_list(value); + return 
fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_point(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformPoint* value, GError** error) { + uint8_t type = 141; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); + g_autoptr(FlValue) values = camera_linux_platform_point_to_list(value); + return fl_standard_message_codec_write_value(codec, buffer, values, error); +} + +static gboolean camera_linux_message_codec_write_value(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { + if (fl_value_get_type(value) == FL_VALUE_TYPE_CUSTOM) { + switch (fl_value_get_custom_type(value)) { + case 129: + return camera_linux_message_codec_write_camera_linux_platform_camera_lens_direction(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 130: + return camera_linux_message_codec_write_camera_linux_platform_device_orientation(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 131: + return camera_linux_message_codec_write_camera_linux_platform_exposure_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 132: + return camera_linux_message_codec_write_camera_linux_platform_flash_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 133: + return camera_linux_message_codec_write_camera_linux_platform_focus_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 134: + return camera_linux_message_codec_write_camera_linux_platform_image_file_format(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 135: + return camera_linux_message_codec_write_camera_linux_platform_image_format_group(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 136: 
+ return camera_linux_message_codec_write_camera_linux_platform_resolution_preset(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); + case 137: + return camera_linux_message_codec_write_camera_linux_platform_size(codec, buffer, CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value)), error); + case 138: + return camera_linux_message_codec_write_camera_linux_platform_camera_description(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_DESCRIPTION(fl_value_get_custom_value_object(value)), error); + case 139: + return camera_linux_message_codec_write_camera_linux_platform_camera_state(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_STATE(fl_value_get_custom_value_object(value)), error); + case 140: + return camera_linux_message_codec_write_camera_linux_platform_media_settings(codec, buffer, CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(fl_value_get_custom_value_object(value)), error); + case 141: + return camera_linux_message_codec_write_camera_linux_platform_point(codec, buffer, CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value)), error); + } + } + + return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->write_value(codec, buffer, value, error); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_lens_direction(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(129, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_device_orientation(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(130, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_exposure_mode(FlStandardMessageCodec* codec, GBytes* buffer, 
size_t* offset, GError** error) { + return fl_value_new_custom(131, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_flash_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(132, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_focus_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(133, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_file_format(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(134, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(135, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(136, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_size(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); + if (values == nullptr) { + 
return nullptr; + } + + g_autoptr(CameraLinuxPlatformSize) value = camera_linux_platform_size_new_from_list(values); + if (value == nullptr) { + g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData"); + return nullptr; + } + + return fl_value_new_custom_object(137, G_OBJECT(value)); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_description(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); + if (values == nullptr) { + return nullptr; + } + + g_autoptr(CameraLinuxPlatformCameraDescription) value = camera_linux_platform_camera_description_new_from_list(values); + if (value == nullptr) { + g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData"); + return nullptr; + } + + return fl_value_new_custom_object(138, G_OBJECT(value)); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); + if (values == nullptr) { + return nullptr; + } + + g_autoptr(CameraLinuxPlatformCameraState) value = camera_linux_platform_camera_state_new_from_list(values); + if (value == nullptr) { + g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData"); + return nullptr; + } + + return fl_value_new_custom_object(139, G_OBJECT(value)); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_media_settings(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); + if (values == nullptr) { + return nullptr; + } + + 
g_autoptr(CameraLinuxPlatformMediaSettings) value = camera_linux_platform_media_settings_new_from_list(values); + if (value == nullptr) { + g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData"); + return nullptr; + } + + return fl_value_new_custom_object(140, G_OBJECT(value)); +} + +static FlValue* camera_linux_message_codec_read_camera_linux_platform_point(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); + if (values == nullptr) { + return nullptr; + } + + g_autoptr(CameraLinuxPlatformPoint) value = camera_linux_platform_point_new_from_list(values); + if (value == nullptr) { + g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData"); + return nullptr; + } + + return fl_value_new_custom_object(141, G_OBJECT(value)); +} + +static FlValue* camera_linux_message_codec_read_value_of_type(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, int type, GError** error) { + switch (type) { + case 129: + return camera_linux_message_codec_read_camera_linux_platform_camera_lens_direction(codec, buffer, offset, error); + case 130: + return camera_linux_message_codec_read_camera_linux_platform_device_orientation(codec, buffer, offset, error); + case 131: + return camera_linux_message_codec_read_camera_linux_platform_exposure_mode(codec, buffer, offset, error); + case 132: + return camera_linux_message_codec_read_camera_linux_platform_flash_mode(codec, buffer, offset, error); + case 133: + return camera_linux_message_codec_read_camera_linux_platform_focus_mode(codec, buffer, offset, error); + case 134: + return camera_linux_message_codec_read_camera_linux_platform_image_file_format(codec, buffer, offset, error); + case 135: + return camera_linux_message_codec_read_camera_linux_platform_image_format_group(codec, buffer, offset, 
error); + case 136: + return camera_linux_message_codec_read_camera_linux_platform_resolution_preset(codec, buffer, offset, error); + case 137: + return camera_linux_message_codec_read_camera_linux_platform_size(codec, buffer, offset, error); + case 138: + return camera_linux_message_codec_read_camera_linux_platform_camera_description(codec, buffer, offset, error); + case 139: + return camera_linux_message_codec_read_camera_linux_platform_camera_state(codec, buffer, offset, error); + case 140: + return camera_linux_message_codec_read_camera_linux_platform_media_settings(codec, buffer, offset, error); + case 141: + return camera_linux_message_codec_read_camera_linux_platform_point(codec, buffer, offset, error); + default: + return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->read_value_of_type(codec, buffer, offset, type, error); + } +} + +static void camera_linux_message_codec_init(CameraLinuxMessageCodec* self) { +} + +static void camera_linux_message_codec_class_init(CameraLinuxMessageCodecClass* klass) { + FL_STANDARD_MESSAGE_CODEC_CLASS(klass)->write_value = camera_linux_message_codec_write_value; + FL_STANDARD_MESSAGE_CODEC_CLASS(klass)->read_value_of_type = camera_linux_message_codec_read_value_of_type; +} + +static CameraLinuxMessageCodec* camera_linux_message_codec_new() { + CameraLinuxMessageCodec* self = CAMERA_LINUX_MESSAGE_CODEC(g_object_new(camera_linux_message_codec_get_type(), nullptr)); + return self; +} + +struct _CameraLinuxCameraApiResponseHandle { + GObject parent_instance; + + FlBasicMessageChannel* channel; + FlBasicMessageChannelResponseHandle* response_handle; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiResponseHandle, camera_linux_camera_api_response_handle, G_TYPE_OBJECT) + +static void camera_linux_camera_api_response_handle_dispose(GObject* object) { + CameraLinuxCameraApiResponseHandle* self = CAMERA_LINUX_CAMERA_API_RESPONSE_HANDLE(object); + g_clear_object(&self->channel); + 
g_clear_object(&self->response_handle); + G_OBJECT_CLASS(camera_linux_camera_api_response_handle_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_response_handle_init(CameraLinuxCameraApiResponseHandle* self) { +} + +static void camera_linux_camera_api_response_handle_class_init(CameraLinuxCameraApiResponseHandleClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_response_handle_dispose; +} + +static CameraLinuxCameraApiResponseHandle* camera_linux_camera_api_response_handle_new(FlBasicMessageChannel* channel, FlBasicMessageChannelResponseHandle* response_handle) { + CameraLinuxCameraApiResponseHandle* self = CAMERA_LINUX_CAMERA_API_RESPONSE_HANDLE(g_object_new(camera_linux_camera_api_response_handle_get_type(), nullptr)); + self->channel = FL_BASIC_MESSAGE_CHANNEL(g_object_ref(channel)); + self->response_handle = FL_BASIC_MESSAGE_CHANNEL_RESPONSE_HANDLE(g_object_ref(response_handle)); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetAvailableCamerasResponse, camera_linux_camera_api_get_available_cameras_response, CAMERA_LINUX, CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE, GObject) + +struct _CameraLinuxCameraApiGetAvailableCamerasResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiGetAvailableCamerasResponse, camera_linux_camera_api_get_available_cameras_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_get_available_cameras_response_dispose(GObject* object) { + CameraLinuxCameraApiGetAvailableCamerasResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_get_available_cameras_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_get_available_cameras_response_init(CameraLinuxCameraApiGetAvailableCamerasResponse* self) { +} + +static void 
camera_linux_camera_api_get_available_cameras_response_class_init(CameraLinuxCameraApiGetAvailableCamerasResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_available_cameras_response_dispose; +} + +static CameraLinuxCameraApiGetAvailableCamerasResponse* camera_linux_camera_api_get_available_cameras_response_new(FlValue* return_value) { + CameraLinuxCameraApiGetAvailableCamerasResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_ref(return_value)); + return self; +} + +static CameraLinuxCameraApiGetAvailableCamerasResponse* camera_linux_camera_api_get_available_cameras_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiGetAvailableCamerasResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiCreateResponse, camera_linux_camera_api_create_response, CAMERA_LINUX, CAMERA_API_CREATE_RESPONSE, GObject) + +struct _CameraLinuxCameraApiCreateResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiCreateResponse, camera_linux_camera_api_create_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_create_response_dispose(GObject* object) { + CameraLinuxCameraApiCreateResponse* self = CAMERA_LINUX_CAMERA_API_CREATE_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_create_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_create_response_init(CameraLinuxCameraApiCreateResponse* self) { +} + +static void camera_linux_camera_api_create_response_class_init(CameraLinuxCameraApiCreateResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_create_response_dispose; +} + +static CameraLinuxCameraApiCreateResponse* camera_linux_camera_api_create_response_new(int64_t return_value) { + CameraLinuxCameraApiCreateResponse* self = CAMERA_LINUX_CAMERA_API_CREATE_RESPONSE(g_object_new(camera_linux_camera_api_create_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_int(return_value)); + return self; +} + +static CameraLinuxCameraApiCreateResponse* camera_linux_camera_api_create_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiCreateResponse* self = CAMERA_LINUX_CAMERA_API_CREATE_RESPONSE(g_object_new(camera_linux_camera_api_create_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? 
message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiInitializeResponse, camera_linux_camera_api_initialize_response, CAMERA_LINUX, CAMERA_API_INITIALIZE_RESPONSE, GObject) + +struct _CameraLinuxCameraApiInitializeResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiInitializeResponse, camera_linux_camera_api_initialize_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_initialize_response_dispose(GObject* object) { + CameraLinuxCameraApiInitializeResponse* self = CAMERA_LINUX_CAMERA_API_INITIALIZE_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_initialize_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_initialize_response_init(CameraLinuxCameraApiInitializeResponse* self) { +} + +static void camera_linux_camera_api_initialize_response_class_init(CameraLinuxCameraApiInitializeResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_initialize_response_dispose; +} + +static CameraLinuxCameraApiInitializeResponse* camera_linux_camera_api_initialize_response_new() { + CameraLinuxCameraApiInitializeResponse* self = CAMERA_LINUX_CAMERA_API_INITIALIZE_RESPONSE(g_object_new(camera_linux_camera_api_initialize_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiInitializeResponse* camera_linux_camera_api_initialize_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiInitializeResponse* self = CAMERA_LINUX_CAMERA_API_INITIALIZE_RESPONSE(g_object_new(camera_linux_camera_api_initialize_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, 
fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStartImageStreamResponse, camera_linux_camera_api_start_image_stream_response, CAMERA_LINUX, CAMERA_API_START_IMAGE_STREAM_RESPONSE, GObject) + +struct _CameraLinuxCameraApiStartImageStreamResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiStartImageStreamResponse, camera_linux_camera_api_start_image_stream_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_start_image_stream_response_dispose(GObject* object) { + CameraLinuxCameraApiStartImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_START_IMAGE_STREAM_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_start_image_stream_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_start_image_stream_response_init(CameraLinuxCameraApiStartImageStreamResponse* self) { +} + +static void camera_linux_camera_api_start_image_stream_response_class_init(CameraLinuxCameraApiStartImageStreamResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_start_image_stream_response_dispose; +} + +static CameraLinuxCameraApiStartImageStreamResponse* camera_linux_camera_api_start_image_stream_response_new() { + CameraLinuxCameraApiStartImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_START_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_start_image_stream_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiStartImageStreamResponse* camera_linux_camera_api_start_image_stream_response_new_error(const gchar* code, const gchar* message, FlValue* details) { 
+ CameraLinuxCameraApiStartImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_START_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_start_image_stream_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStopImageStreamResponse, camera_linux_camera_api_stop_image_stream_response, CAMERA_LINUX, CAMERA_API_STOP_IMAGE_STREAM_RESPONSE, GObject) + +struct _CameraLinuxCameraApiStopImageStreamResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiStopImageStreamResponse, camera_linux_camera_api_stop_image_stream_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_stop_image_stream_response_dispose(GObject* object) { + CameraLinuxCameraApiStopImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_STOP_IMAGE_STREAM_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_stop_image_stream_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_stop_image_stream_response_init(CameraLinuxCameraApiStopImageStreamResponse* self) { +} + +static void camera_linux_camera_api_stop_image_stream_response_class_init(CameraLinuxCameraApiStopImageStreamResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_stop_image_stream_response_dispose; +} + +static CameraLinuxCameraApiStopImageStreamResponse* camera_linux_camera_api_stop_image_stream_response_new() { + CameraLinuxCameraApiStopImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_STOP_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_stop_image_stream_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + 
fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiStopImageStreamResponse* camera_linux_camera_api_stop_image_stream_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiStopImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_STOP_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_stop_image_stream_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiReceivedImageStreamDataResponse, camera_linux_camera_api_received_image_stream_data_response, CAMERA_LINUX, CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE, GObject) + +struct _CameraLinuxCameraApiReceivedImageStreamDataResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiReceivedImageStreamDataResponse, camera_linux_camera_api_received_image_stream_data_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_received_image_stream_data_response_dispose(GObject* object) { + CameraLinuxCameraApiReceivedImageStreamDataResponse* self = CAMERA_LINUX_CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_received_image_stream_data_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_received_image_stream_data_response_init(CameraLinuxCameraApiReceivedImageStreamDataResponse* self) { +} + +static void camera_linux_camera_api_received_image_stream_data_response_class_init(CameraLinuxCameraApiReceivedImageStreamDataResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = 
camera_linux_camera_api_received_image_stream_data_response_dispose; +} + +static CameraLinuxCameraApiReceivedImageStreamDataResponse* camera_linux_camera_api_received_image_stream_data_response_new() { + CameraLinuxCameraApiReceivedImageStreamDataResponse* self = CAMERA_LINUX_CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE(g_object_new(camera_linux_camera_api_received_image_stream_data_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiReceivedImageStreamDataResponse* camera_linux_camera_api_received_image_stream_data_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiReceivedImageStreamDataResponse* self = CAMERA_LINUX_CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE(g_object_new(camera_linux_camera_api_received_image_stream_data_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiDisposeResponse, camera_linux_camera_api_dispose_response, CAMERA_LINUX, CAMERA_API_DISPOSE_RESPONSE, GObject) + +struct _CameraLinuxCameraApiDisposeResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiDisposeResponse, camera_linux_camera_api_dispose_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_dispose_response_dispose(GObject* object) { + CameraLinuxCameraApiDisposeResponse* self = CAMERA_LINUX_CAMERA_API_DISPOSE_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_dispose_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_dispose_response_init(CameraLinuxCameraApiDisposeResponse* self) { +} + +static void camera_linux_camera_api_dispose_response_class_init(CameraLinuxCameraApiDisposeResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_dispose_response_dispose; +} + +static CameraLinuxCameraApiDisposeResponse* camera_linux_camera_api_dispose_response_new() { + CameraLinuxCameraApiDisposeResponse* self = CAMERA_LINUX_CAMERA_API_DISPOSE_RESPONSE(g_object_new(camera_linux_camera_api_dispose_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiDisposeResponse* camera_linux_camera_api_dispose_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiDisposeResponse* self = CAMERA_LINUX_CAMERA_API_DISPOSE_RESPONSE(g_object_new(camera_linux_camera_api_dispose_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? 
message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiLockCaptureOrientationResponse, camera_linux_camera_api_lock_capture_orientation_response, CAMERA_LINUX, CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE, GObject) + +struct _CameraLinuxCameraApiLockCaptureOrientationResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiLockCaptureOrientationResponse, camera_linux_camera_api_lock_capture_orientation_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_lock_capture_orientation_response_dispose(GObject* object) { + CameraLinuxCameraApiLockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_lock_capture_orientation_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_lock_capture_orientation_response_init(CameraLinuxCameraApiLockCaptureOrientationResponse* self) { +} + +static void camera_linux_camera_api_lock_capture_orientation_response_class_init(CameraLinuxCameraApiLockCaptureOrientationResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_lock_capture_orientation_response_dispose; +} + +static CameraLinuxCameraApiLockCaptureOrientationResponse* camera_linux_camera_api_lock_capture_orientation_response_new() { + CameraLinuxCameraApiLockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_lock_capture_orientation_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiLockCaptureOrientationResponse* camera_linux_camera_api_lock_capture_orientation_response_new_error(const gchar* code, const gchar* 
message, FlValue* details) { + CameraLinuxCameraApiLockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_lock_capture_orientation_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiUnlockCaptureOrientationResponse, camera_linux_camera_api_unlock_capture_orientation_response, CAMERA_LINUX, CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE, GObject) + +struct _CameraLinuxCameraApiUnlockCaptureOrientationResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiUnlockCaptureOrientationResponse, camera_linux_camera_api_unlock_capture_orientation_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_unlock_capture_orientation_response_dispose(GObject* object) { + CameraLinuxCameraApiUnlockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_unlock_capture_orientation_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_unlock_capture_orientation_response_init(CameraLinuxCameraApiUnlockCaptureOrientationResponse* self) { +} + +static void camera_linux_camera_api_unlock_capture_orientation_response_class_init(CameraLinuxCameraApiUnlockCaptureOrientationResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_unlock_capture_orientation_response_dispose; +} + +static CameraLinuxCameraApiUnlockCaptureOrientationResponse* camera_linux_camera_api_unlock_capture_orientation_response_new() { + 
CameraLinuxCameraApiUnlockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_unlock_capture_orientation_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiUnlockCaptureOrientationResponse* camera_linux_camera_api_unlock_capture_orientation_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiUnlockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_unlock_capture_orientation_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiTakePictureResponse, camera_linux_camera_api_take_picture_response, CAMERA_LINUX, CAMERA_API_TAKE_PICTURE_RESPONSE, GObject) + +struct _CameraLinuxCameraApiTakePictureResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiTakePictureResponse, camera_linux_camera_api_take_picture_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_take_picture_response_dispose(GObject* object) { + CameraLinuxCameraApiTakePictureResponse* self = CAMERA_LINUX_CAMERA_API_TAKE_PICTURE_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_take_picture_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_take_picture_response_init(CameraLinuxCameraApiTakePictureResponse* self) { +} + +static void 
camera_linux_camera_api_take_picture_response_class_init(CameraLinuxCameraApiTakePictureResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_take_picture_response_dispose; +} + +static CameraLinuxCameraApiTakePictureResponse* camera_linux_camera_api_take_picture_response_new(const gchar* return_value) { + CameraLinuxCameraApiTakePictureResponse* self = CAMERA_LINUX_CAMERA_API_TAKE_PICTURE_RESPONSE(g_object_new(camera_linux_camera_api_take_picture_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(return_value)); + return self; +} + +static CameraLinuxCameraApiTakePictureResponse* camera_linux_camera_api_take_picture_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiTakePictureResponse* self = CAMERA_LINUX_CAMERA_API_TAKE_PICTURE_RESPONSE(g_object_new(camera_linux_camera_api_take_picture_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiPrepareForVideoRecordingResponse, camera_linux_camera_api_prepare_for_video_recording_response, CAMERA_LINUX, CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE, GObject) + +struct _CameraLinuxCameraApiPrepareForVideoRecordingResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiPrepareForVideoRecordingResponse, camera_linux_camera_api_prepare_for_video_recording_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_prepare_for_video_recording_response_dispose(GObject* object) { + CameraLinuxCameraApiPrepareForVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_prepare_for_video_recording_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_prepare_for_video_recording_response_init(CameraLinuxCameraApiPrepareForVideoRecordingResponse* self) { +} + +static void camera_linux_camera_api_prepare_for_video_recording_response_class_init(CameraLinuxCameraApiPrepareForVideoRecordingResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_prepare_for_video_recording_response_dispose; +} + +static CameraLinuxCameraApiPrepareForVideoRecordingResponse* camera_linux_camera_api_prepare_for_video_recording_response_new() { + CameraLinuxCameraApiPrepareForVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_prepare_for_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiPrepareForVideoRecordingResponse* camera_linux_camera_api_prepare_for_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* 
details) { + CameraLinuxCameraApiPrepareForVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_prepare_for_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStartVideoRecordingResponse, camera_linux_camera_api_start_video_recording_response, CAMERA_LINUX, CAMERA_API_START_VIDEO_RECORDING_RESPONSE, GObject) + +struct _CameraLinuxCameraApiStartVideoRecordingResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiStartVideoRecordingResponse, camera_linux_camera_api_start_video_recording_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_start_video_recording_response_dispose(GObject* object) { + CameraLinuxCameraApiStartVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_START_VIDEO_RECORDING_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_start_video_recording_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_start_video_recording_response_init(CameraLinuxCameraApiStartVideoRecordingResponse* self) { +} + +static void camera_linux_camera_api_start_video_recording_response_class_init(CameraLinuxCameraApiStartVideoRecordingResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_start_video_recording_response_dispose; +} + +static CameraLinuxCameraApiStartVideoRecordingResponse* camera_linux_camera_api_start_video_recording_response_new() { + CameraLinuxCameraApiStartVideoRecordingResponse* self = 
CAMERA_LINUX_CAMERA_API_START_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_start_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiStartVideoRecordingResponse* camera_linux_camera_api_start_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiStartVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_START_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_start_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStopVideoRecordingResponse, camera_linux_camera_api_stop_video_recording_response, CAMERA_LINUX, CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE, GObject) + +struct _CameraLinuxCameraApiStopVideoRecordingResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiStopVideoRecordingResponse, camera_linux_camera_api_stop_video_recording_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_stop_video_recording_response_dispose(GObject* object) { + CameraLinuxCameraApiStopVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_stop_video_recording_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_stop_video_recording_response_init(CameraLinuxCameraApiStopVideoRecordingResponse* self) { +} + +static void 
camera_linux_camera_api_stop_video_recording_response_class_init(CameraLinuxCameraApiStopVideoRecordingResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_stop_video_recording_response_dispose; +} + +static CameraLinuxCameraApiStopVideoRecordingResponse* camera_linux_camera_api_stop_video_recording_response_new(const gchar* return_value) { + CameraLinuxCameraApiStopVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_stop_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(return_value)); + return self; +} + +static CameraLinuxCameraApiStopVideoRecordingResponse* camera_linux_camera_api_stop_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiStopVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_STOP_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_stop_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiPauseVideoRecordingResponse, camera_linux_camera_api_pause_video_recording_response, CAMERA_LINUX, CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE, GObject) + +struct _CameraLinuxCameraApiPauseVideoRecordingResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiPauseVideoRecordingResponse, camera_linux_camera_api_pause_video_recording_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_pause_video_recording_response_dispose(GObject* object) { + CameraLinuxCameraApiPauseVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_pause_video_recording_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_pause_video_recording_response_init(CameraLinuxCameraApiPauseVideoRecordingResponse* self) { +} + +static void camera_linux_camera_api_pause_video_recording_response_class_init(CameraLinuxCameraApiPauseVideoRecordingResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_pause_video_recording_response_dispose; +} + +static CameraLinuxCameraApiPauseVideoRecordingResponse* camera_linux_camera_api_pause_video_recording_response_new() { + CameraLinuxCameraApiPauseVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_pause_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiPauseVideoRecordingResponse* camera_linux_camera_api_pause_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiPauseVideoRecordingResponse* self = 
CAMERA_LINUX_CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_pause_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResumeVideoRecordingResponse, camera_linux_camera_api_resume_video_recording_response, CAMERA_LINUX, CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE, GObject) + +struct _CameraLinuxCameraApiResumeVideoRecordingResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiResumeVideoRecordingResponse, camera_linux_camera_api_resume_video_recording_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_resume_video_recording_response_dispose(GObject* object) { + CameraLinuxCameraApiResumeVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_resume_video_recording_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_resume_video_recording_response_init(CameraLinuxCameraApiResumeVideoRecordingResponse* self) { +} + +static void camera_linux_camera_api_resume_video_recording_response_class_init(CameraLinuxCameraApiResumeVideoRecordingResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_resume_video_recording_response_dispose; +} + +static CameraLinuxCameraApiResumeVideoRecordingResponse* camera_linux_camera_api_resume_video_recording_response_new() { + CameraLinuxCameraApiResumeVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_resume_video_recording_response_get_type(), 
nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiResumeVideoRecordingResponse* camera_linux_camera_api_resume_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiResumeVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_resume_video_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFlashModeResponse, camera_linux_camera_api_set_flash_mode_response, CAMERA_LINUX, CAMERA_API_SET_FLASH_MODE_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetFlashModeResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetFlashModeResponse, camera_linux_camera_api_set_flash_mode_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_flash_mode_response_dispose(GObject* object) { + CameraLinuxCameraApiSetFlashModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FLASH_MODE_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_flash_mode_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_flash_mode_response_init(CameraLinuxCameraApiSetFlashModeResponse* self) { +} + +static void camera_linux_camera_api_set_flash_mode_response_class_init(CameraLinuxCameraApiSetFlashModeResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_flash_mode_response_dispose; +} + +static CameraLinuxCameraApiSetFlashModeResponse* 
camera_linux_camera_api_set_flash_mode_response_new() { + CameraLinuxCameraApiSetFlashModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FLASH_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_flash_mode_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetFlashModeResponse* camera_linux_camera_api_set_flash_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetFlashModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FLASH_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_flash_mode_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_MODE_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetExposureModeResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_exposure_mode_response_dispose(GObject* object) { + CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_mode_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_exposure_mode_response_init(CameraLinuxCameraApiSetExposureModeResponse* self) { +} + +static void 
camera_linux_camera_api_set_exposure_mode_response_class_init(CameraLinuxCameraApiSetExposureModeResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_mode_response_dispose; +} + +static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new() { + CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposurePointResponse, camera_linux_camera_api_set_exposure_point_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_POINT_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetExposurePointResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetExposurePointResponse, camera_linux_camera_api_set_exposure_point_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_exposure_point_response_dispose(GObject* object) { + CameraLinuxCameraApiSetExposurePointResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_POINT_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_point_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_exposure_point_response_init(CameraLinuxCameraApiSetExposurePointResponse* self) { +} + +static void camera_linux_camera_api_set_exposure_point_response_class_init(CameraLinuxCameraApiSetExposurePointResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_point_response_dispose; +} + +static CameraLinuxCameraApiSetExposurePointResponse* camera_linux_camera_api_set_exposure_point_response_new() { + CameraLinuxCameraApiSetExposurePointResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_point_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetExposurePointResponse* camera_linux_camera_api_set_exposure_point_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetExposurePointResponse* self = 
CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_point_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetLensPositionResponse, camera_linux_camera_api_set_lens_position_response, CAMERA_LINUX, CAMERA_API_SET_LENS_POSITION_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetLensPositionResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetLensPositionResponse, camera_linux_camera_api_set_lens_position_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_lens_position_response_dispose(GObject* object) { + CameraLinuxCameraApiSetLensPositionResponse* self = CAMERA_LINUX_CAMERA_API_SET_LENS_POSITION_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_lens_position_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_lens_position_response_init(CameraLinuxCameraApiSetLensPositionResponse* self) { +} + +static void camera_linux_camera_api_set_lens_position_response_class_init(CameraLinuxCameraApiSetLensPositionResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_lens_position_response_dispose; +} + +static CameraLinuxCameraApiSetLensPositionResponse* camera_linux_camera_api_set_lens_position_response_new() { + CameraLinuxCameraApiSetLensPositionResponse* self = CAMERA_LINUX_CAMERA_API_SET_LENS_POSITION_RESPONSE(g_object_new(camera_linux_camera_api_set_lens_position_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + 
return self; +} + +static CameraLinuxCameraApiSetLensPositionResponse* camera_linux_camera_api_set_lens_position_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetLensPositionResponse* self = CAMERA_LINUX_CAMERA_API_SET_LENS_POSITION_RESPONSE(g_object_new(camera_linux_camera_api_set_lens_position_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMinExposureOffsetResponse, camera_linux_camera_api_get_min_exposure_offset_response, CAMERA_LINUX, CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE, GObject) + +struct _CameraLinuxCameraApiGetMinExposureOffsetResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiGetMinExposureOffsetResponse, camera_linux_camera_api_get_min_exposure_offset_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_get_min_exposure_offset_response_dispose(GObject* object) { + CameraLinuxCameraApiGetMinExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_get_min_exposure_offset_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_get_min_exposure_offset_response_init(CameraLinuxCameraApiGetMinExposureOffsetResponse* self) { +} + +static void camera_linux_camera_api_get_min_exposure_offset_response_class_init(CameraLinuxCameraApiGetMinExposureOffsetResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_min_exposure_offset_response_dispose; +} + +static CameraLinuxCameraApiGetMinExposureOffsetResponse* 
camera_linux_camera_api_get_min_exposure_offset_response_new(double return_value) { + CameraLinuxCameraApiGetMinExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_min_exposure_offset_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_float(return_value)); + return self; +} + +static CameraLinuxCameraApiGetMinExposureOffsetResponse* camera_linux_camera_api_get_min_exposure_offset_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiGetMinExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_min_exposure_offset_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMaxExposureOffsetResponse, camera_linux_camera_api_get_max_exposure_offset_response, CAMERA_LINUX, CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE, GObject) + +struct _CameraLinuxCameraApiGetMaxExposureOffsetResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiGetMaxExposureOffsetResponse, camera_linux_camera_api_get_max_exposure_offset_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_get_max_exposure_offset_response_dispose(GObject* object) { + CameraLinuxCameraApiGetMaxExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_get_max_exposure_offset_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_get_max_exposure_offset_response_init(CameraLinuxCameraApiGetMaxExposureOffsetResponse* self) { +} + +static void camera_linux_camera_api_get_max_exposure_offset_response_class_init(CameraLinuxCameraApiGetMaxExposureOffsetResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_max_exposure_offset_response_dispose; +} + +static CameraLinuxCameraApiGetMaxExposureOffsetResponse* camera_linux_camera_api_get_max_exposure_offset_response_new(double return_value) { + CameraLinuxCameraApiGetMaxExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_max_exposure_offset_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_float(return_value)); + return self; +} + +static CameraLinuxCameraApiGetMaxExposureOffsetResponse* camera_linux_camera_api_get_max_exposure_offset_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + 
CameraLinuxCameraApiGetMaxExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_max_exposure_offset_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposureOffsetResponse, camera_linux_camera_api_set_exposure_offset_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetExposureOffsetResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetExposureOffsetResponse, camera_linux_camera_api_set_exposure_offset_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_exposure_offset_response_dispose(GObject* object) { + CameraLinuxCameraApiSetExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_offset_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_exposure_offset_response_init(CameraLinuxCameraApiSetExposureOffsetResponse* self) { +} + +static void camera_linux_camera_api_set_exposure_offset_response_class_init(CameraLinuxCameraApiSetExposureOffsetResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_offset_response_dispose; +} + +static CameraLinuxCameraApiSetExposureOffsetResponse* camera_linux_camera_api_set_exposure_offset_response_new() { + CameraLinuxCameraApiSetExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_offset_response_get_type(), 
nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetExposureOffsetResponse* camera_linux_camera_api_set_exposure_offset_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_offset_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, CAMERA_LINUX, CAMERA_API_SET_FOCUS_MODE_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetFocusModeResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_focus_mode_response_dispose(GObject* object) { + CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_focus_mode_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_focus_mode_response_init(CameraLinuxCameraApiSetFocusModeResponse* self) { +} + +static void camera_linux_camera_api_set_focus_mode_response_class_init(CameraLinuxCameraApiSetFocusModeResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_focus_mode_response_dispose; +} + +static CameraLinuxCameraApiSetFocusModeResponse* 
camera_linux_camera_api_set_focus_mode_response_new() { + CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetFocusModeResponse* camera_linux_camera_api_set_focus_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFocusPointResponse, camera_linux_camera_api_set_focus_point_response, CAMERA_LINUX, CAMERA_API_SET_FOCUS_POINT_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetFocusPointResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetFocusPointResponse, camera_linux_camera_api_set_focus_point_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_focus_point_response_dispose(GObject* object) { + CameraLinuxCameraApiSetFocusPointResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_POINT_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_focus_point_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_focus_point_response_init(CameraLinuxCameraApiSetFocusPointResponse* self) { +} + +static void 
camera_linux_camera_api_set_focus_point_response_class_init(CameraLinuxCameraApiSetFocusPointResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_focus_point_response_dispose; +} + +static CameraLinuxCameraApiSetFocusPointResponse* camera_linux_camera_api_set_focus_point_response_new() { + CameraLinuxCameraApiSetFocusPointResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_point_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetFocusPointResponse* camera_linux_camera_api_set_focus_point_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetFocusPointResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_point_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMinZoomLevelResponse, camera_linux_camera_api_get_min_zoom_level_response, CAMERA_LINUX, CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE, GObject) + +struct _CameraLinuxCameraApiGetMinZoomLevelResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiGetMinZoomLevelResponse, camera_linux_camera_api_get_min_zoom_level_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_get_min_zoom_level_response_dispose(GObject* object) { + CameraLinuxCameraApiGetMinZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_get_min_zoom_level_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_get_min_zoom_level_response_init(CameraLinuxCameraApiGetMinZoomLevelResponse* self) { +} + +static void camera_linux_camera_api_get_min_zoom_level_response_class_init(CameraLinuxCameraApiGetMinZoomLevelResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_min_zoom_level_response_dispose; +} + +static CameraLinuxCameraApiGetMinZoomLevelResponse* camera_linux_camera_api_get_min_zoom_level_response_new(double return_value) { + CameraLinuxCameraApiGetMinZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_min_zoom_level_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_float(return_value)); + return self; +} + +static CameraLinuxCameraApiGetMinZoomLevelResponse* camera_linux_camera_api_get_min_zoom_level_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiGetMinZoomLevelResponse* self = 
CAMERA_LINUX_CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_min_zoom_level_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMaxZoomLevelResponse, camera_linux_camera_api_get_max_zoom_level_response, CAMERA_LINUX, CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE, GObject) + +struct _CameraLinuxCameraApiGetMaxZoomLevelResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiGetMaxZoomLevelResponse, camera_linux_camera_api_get_max_zoom_level_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_get_max_zoom_level_response_dispose(GObject* object) { + CameraLinuxCameraApiGetMaxZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_get_max_zoom_level_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_get_max_zoom_level_response_init(CameraLinuxCameraApiGetMaxZoomLevelResponse* self) { +} + +static void camera_linux_camera_api_get_max_zoom_level_response_class_init(CameraLinuxCameraApiGetMaxZoomLevelResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_max_zoom_level_response_dispose; +} + +static CameraLinuxCameraApiGetMaxZoomLevelResponse* camera_linux_camera_api_get_max_zoom_level_response_new(double return_value) { + CameraLinuxCameraApiGetMaxZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_max_zoom_level_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + 
fl_value_append_take(self->value, fl_value_new_float(return_value)); + return self; +} + +static CameraLinuxCameraApiGetMaxZoomLevelResponse* camera_linux_camera_api_get_max_zoom_level_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiGetMaxZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_max_zoom_level_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetZoomLevelResponse, camera_linux_camera_api_set_zoom_level_response, CAMERA_LINUX, CAMERA_API_SET_ZOOM_LEVEL_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetZoomLevelResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetZoomLevelResponse, camera_linux_camera_api_set_zoom_level_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_zoom_level_response_dispose(GObject* object) { + CameraLinuxCameraApiSetZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_SET_ZOOM_LEVEL_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_zoom_level_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_zoom_level_response_init(CameraLinuxCameraApiSetZoomLevelResponse* self) { +} + +static void camera_linux_camera_api_set_zoom_level_response_class_init(CameraLinuxCameraApiSetZoomLevelResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_zoom_level_response_dispose; +} + +static CameraLinuxCameraApiSetZoomLevelResponse* camera_linux_camera_api_set_zoom_level_response_new() { + 
CameraLinuxCameraApiSetZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_SET_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_set_zoom_level_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetZoomLevelResponse* camera_linux_camera_api_set_zoom_level_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_SET_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_set_zoom_level_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiPausePreviewResponse, camera_linux_camera_api_pause_preview_response, CAMERA_LINUX, CAMERA_API_PAUSE_PREVIEW_RESPONSE, GObject) + +struct _CameraLinuxCameraApiPausePreviewResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiPausePreviewResponse, camera_linux_camera_api_pause_preview_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_pause_preview_response_dispose(GObject* object) { + CameraLinuxCameraApiPausePreviewResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_PREVIEW_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_pause_preview_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_pause_preview_response_init(CameraLinuxCameraApiPausePreviewResponse* self) { +} + +static void camera_linux_camera_api_pause_preview_response_class_init(CameraLinuxCameraApiPausePreviewResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = 
camera_linux_camera_api_pause_preview_response_dispose; +} + +static CameraLinuxCameraApiPausePreviewResponse* camera_linux_camera_api_pause_preview_response_new() { + CameraLinuxCameraApiPausePreviewResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_pause_preview_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiPausePreviewResponse* camera_linux_camera_api_pause_preview_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiPausePreviewResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_pause_preview_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResumePreviewResponse, camera_linux_camera_api_resume_preview_response, CAMERA_LINUX, CAMERA_API_RESUME_PREVIEW_RESPONSE, GObject) + +struct _CameraLinuxCameraApiResumePreviewResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiResumePreviewResponse, camera_linux_camera_api_resume_preview_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_resume_preview_response_dispose(GObject* object) { + CameraLinuxCameraApiResumePreviewResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_PREVIEW_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_resume_preview_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_resume_preview_response_init(CameraLinuxCameraApiResumePreviewResponse* self) { +} + +static void camera_linux_camera_api_resume_preview_response_class_init(CameraLinuxCameraApiResumePreviewResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_resume_preview_response_dispose; +} + +static CameraLinuxCameraApiResumePreviewResponse* camera_linux_camera_api_resume_preview_response_new() { + CameraLinuxCameraApiResumePreviewResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_resume_preview_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiResumePreviewResponse* camera_linux_camera_api_resume_preview_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiResumePreviewResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_resume_preview_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, 
fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse, camera_linux_camera_api_update_description_while_recording_response, CAMERA_LINUX, CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE, GObject) + +struct _CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse, camera_linux_camera_api_update_description_while_recording_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_update_description_while_recording_response_dispose(GObject* object) { + CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self = CAMERA_LINUX_CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_update_description_while_recording_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_update_description_while_recording_response_init(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self) { +} + +static void camera_linux_camera_api_update_description_while_recording_response_class_init(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_update_description_while_recording_response_dispose; +} + +static CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* camera_linux_camera_api_update_description_while_recording_response_new() { + CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self = 
CAMERA_LINUX_CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_update_description_while_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* camera_linux_camera_api_update_description_while_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self = CAMERA_LINUX_CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_update_description_while_recording_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); + return self; +} + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetImageFileFormatResponse, camera_linux_camera_api_set_image_file_format_response, CAMERA_LINUX, CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetImageFileFormatResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiSetImageFileFormatResponse, camera_linux_camera_api_set_image_file_format_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_image_file_format_response_dispose(GObject* object) { + CameraLinuxCameraApiSetImageFileFormatResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_image_file_format_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_image_file_format_response_init(CameraLinuxCameraApiSetImageFileFormatResponse* self) { 
+} + +static void camera_linux_camera_api_set_image_file_format_response_class_init(CameraLinuxCameraApiSetImageFileFormatResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_image_file_format_response_dispose; +} + +static CameraLinuxCameraApiSetImageFileFormatResponse* camera_linux_camera_api_set_image_file_format_response_new() { + CameraLinuxCameraApiSetImageFileFormatResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE(g_object_new(camera_linux_camera_api_set_image_file_format_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetImageFileFormatResponse* camera_linux_camera_api_set_image_file_format_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetImageFileFormatResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE(g_object_new(camera_linux_camera_api_set_image_file_format_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + +struct _CameraLinuxCameraApi { + GObject parent_instance; + + const CameraLinuxCameraApiVTable* vtable; + gpointer user_data; + GDestroyNotify user_data_free_func; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApi, camera_linux_camera_api, G_TYPE_OBJECT) + +static void camera_linux_camera_api_dispose(GObject* object) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(object); + if (self->user_data != nullptr) { + self->user_data_free_func(self->user_data); + } + self->user_data = nullptr; + G_OBJECT_CLASS(camera_linux_camera_api_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_init(CameraLinuxCameraApi* self) { +} + +static void camera_linux_camera_api_class_init(CameraLinuxCameraApiClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_dispose; +} + +static CameraLinuxCameraApi* camera_linux_camera_api_new(const CameraLinuxCameraApiVTable* vtable, gpointer user_data, GDestroyNotify user_data_free_func) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(g_object_new(camera_linux_camera_api_get_type(), nullptr)); + self->vtable = vtable; + self->user_data = user_data; + self->user_data_free_func = user_data_free_func; + return self; +} + +static void camera_linux_camera_api_get_available_cameras_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->get_available_cameras == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->get_available_cameras(handle, self->user_data); +} + +static void camera_linux_camera_api_create_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { 
+ CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->create == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + const gchar* camera_name = fl_value_get_string(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformMediaSettings* settings = CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(fl_value_get_custom_value_object(value1)); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->create(camera_name, settings, handle, self->user_data); +} + +static void camera_linux_camera_api_initialize_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->initialize == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformImageFormatGroup image_format = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->initialize(camera_id, image_format, handle, self->user_data); +} + +static void camera_linux_camera_api_start_image_stream_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->start_image_stream == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + 
self->vtable->start_image_stream(handle, self->user_data); +} + +static void camera_linux_camera_api_stop_image_stream_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->stop_image_stream == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->stop_image_stream(handle, self->user_data); +} + +static void camera_linux_camera_api_received_image_stream_data_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->received_image_stream_data == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->received_image_stream_data(handle, self->user_data); +} + +static void camera_linux_camera_api_dispose_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->dispose == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->dispose(camera_id, handle, self->user_data); +} + +static void camera_linux_camera_api_lock_capture_orientation_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + 
CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->lock_capture_orientation == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + CameraLinuxPlatformDeviceOrientation orientation = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->lock_capture_orientation(orientation, handle, self->user_data); +} + +static void camera_linux_camera_api_unlock_capture_orientation_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->unlock_capture_orientation == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->unlock_capture_orientation(handle, self->user_data); +} + +static void camera_linux_camera_api_take_picture_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->take_picture == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->take_picture(handle, self->user_data); +} + +static void camera_linux_camera_api_prepare_for_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || 
self->vtable->prepare_for_video_recording == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->prepare_for_video_recording(handle, self->user_data); +} + +static void camera_linux_camera_api_start_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->start_video_recording == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + gboolean enable_stream = fl_value_get_bool(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->start_video_recording(enable_stream, handle, self->user_data); +} + +static void camera_linux_camera_api_stop_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->stop_video_recording == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->stop_video_recording(handle, self->user_data); +} + +static void camera_linux_camera_api_pause_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->pause_video_recording == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + 
self->vtable->pause_video_recording(handle, self->user_data); +} + +static void camera_linux_camera_api_resume_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->resume_video_recording == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->resume_video_recording(handle, self->user_data); +} + +static void camera_linux_camera_api_set_flash_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_flash_mode == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + CameraLinuxPlatformFlashMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_flash_mode(mode, handle, self->user_data); +} + +static void camera_linux_camera_api_set_exposure_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_exposure_mode == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + CameraLinuxPlatformExposureMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = 
camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_exposure_mode(mode, handle, self->user_data); +} + +static void camera_linux_camera_api_set_exposure_point_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_exposure_point == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + CameraLinuxPlatformPoint* point = CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value0)); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_exposure_point(point, handle, self->user_data); +} + +static void camera_linux_camera_api_set_lens_position_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_lens_position == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + double position = fl_value_get_float(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_lens_position(position, handle, self->user_data); +} + +static void camera_linux_camera_api_get_min_exposure_offset_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->get_min_exposure_offset == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, 
response_handle); + self->vtable->get_min_exposure_offset(handle, self->user_data); +} + +static void camera_linux_camera_api_get_max_exposure_offset_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->get_max_exposure_offset == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->get_max_exposure_offset(handle, self->user_data); +} + +static void camera_linux_camera_api_set_exposure_offset_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_exposure_offset == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + double offset = fl_value_get_float(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_exposure_offset(offset, handle, self->user_data); +} + +static void camera_linux_camera_api_set_focus_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_focus_mode == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + CameraLinuxPlatformFocusMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + 
self->vtable->set_focus_mode(mode, handle, self->user_data); +} + +static void camera_linux_camera_api_set_focus_point_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_focus_point == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + CameraLinuxPlatformPoint* point = CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value0)); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_focus_point(point, handle, self->user_data); +} + +static void camera_linux_camera_api_get_min_zoom_level_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->get_min_zoom_level == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->get_min_zoom_level(handle, self->user_data); +} + +static void camera_linux_camera_api_get_max_zoom_level_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->get_max_zoom_level == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->get_max_zoom_level(handle, self->user_data); +} + +static void camera_linux_camera_api_set_zoom_level_cb(FlBasicMessageChannel* channel, FlValue* message_, 
FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_zoom_level == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + double zoom = fl_value_get_float(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_zoom_level(zoom, handle, self->user_data); +} + +static void camera_linux_camera_api_pause_preview_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->pause_preview == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->pause_preview(handle, self->user_data); +} + +static void camera_linux_camera_api_resume_preview_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->resume_preview == nullptr) { + return; + } + + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->resume_preview(handle, self->user_data); +} + +static void camera_linux_camera_api_update_description_while_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->update_description_while_recording == nullptr) { + return; + } + + FlValue* value0 
= fl_value_get_list_value(message_, 0); + const gchar* camera_name = fl_value_get_string(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->update_description_while_recording(camera_name, handle, self->user_data); +} + +static void camera_linux_camera_api_set_image_file_format_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_image_file_format == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + CameraLinuxPlatformImageFileFormat format = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_image_file_format(format, handle, self->user_data); +} + +void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix, const CameraLinuxCameraApiVTable* vtable, gpointer user_data, GDestroyNotify user_data_free_func) { + g_autofree gchar* dot_suffix = suffix != nullptr ? 
g_strdup_printf(".%s", suffix) : g_strdup(""); + g_autoptr(CameraLinuxCameraApi) api_data = camera_linux_camera_api_new(vtable, user_data, user_data_free_func); + + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + g_autofree gchar* get_available_cameras_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCameras%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_available_cameras_channel = fl_basic_message_channel_new(messenger, get_available_cameras_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_available_cameras_channel, camera_linux_camera_api_get_available_cameras_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* create_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.create%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) create_channel = fl_basic_message_channel_new(messenger, create_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(create_channel, camera_linux_camera_api_create_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* initialize_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.initialize%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) initialize_channel = fl_basic_message_channel_new(messenger, initialize_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(initialize_channel, camera_linux_camera_api_initialize_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* start_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startImageStream%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) start_image_stream_channel = fl_basic_message_channel_new(messenger, start_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(start_image_stream_channel, camera_linux_camera_api_start_image_stream_cb, 
g_object_ref(api_data), g_object_unref); + g_autofree gchar* stop_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) stop_image_stream_channel = fl_basic_message_channel_new(messenger, stop_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(stop_image_stream_channel, camera_linux_camera_api_stop_image_stream_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* received_image_stream_data_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) received_image_stream_data_channel = fl_basic_message_channel_new(messenger, received_image_stream_data_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(received_image_stream_data_channel, camera_linux_camera_api_received_image_stream_data_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* dispose_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.dispose%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) dispose_channel = fl_basic_message_channel_new(messenger, dispose_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(dispose_channel, camera_linux_camera_api_dispose_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* lock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.lockCaptureOrientation%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) lock_capture_orientation_channel = fl_basic_message_channel_new(messenger, lock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(lock_capture_orientation_channel, camera_linux_camera_api_lock_capture_orientation_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* 
unlock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.unlockCaptureOrientation%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) unlock_capture_orientation_channel = fl_basic_message_channel_new(messenger, unlock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(unlock_capture_orientation_channel, camera_linux_camera_api_unlock_capture_orientation_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* take_picture_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.takePicture%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) take_picture_channel = fl_basic_message_channel_new(messenger, take_picture_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(take_picture_channel, camera_linux_camera_api_take_picture_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* prepare_for_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.prepareForVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) prepare_for_video_recording_channel = fl_basic_message_channel_new(messenger, prepare_for_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(prepare_for_video_recording_channel, camera_linux_camera_api_prepare_for_video_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* start_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) start_video_recording_channel = fl_basic_message_channel_new(messenger, start_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(start_video_recording_channel, camera_linux_camera_api_start_video_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* 
stop_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) stop_video_recording_channel = fl_basic_message_channel_new(messenger, stop_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(stop_video_recording_channel, camera_linux_camera_api_stop_video_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* pause_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pauseVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) pause_video_recording_channel = fl_basic_message_channel_new(messenger, pause_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(pause_video_recording_channel, camera_linux_camera_api_pause_video_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* resume_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumeVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) resume_video_recording_channel = fl_basic_message_channel_new(messenger, resume_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(resume_video_recording_channel, camera_linux_camera_api_resume_video_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_flash_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFlashMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_flash_mode_channel = fl_basic_message_channel_new(messenger, set_flash_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_flash_mode_channel, camera_linux_camera_api_set_flash_mode_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_exposure_mode_channel_name = 
g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_mode_channel = fl_basic_message_channel_new(messenger, set_exposure_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_mode_channel, camera_linux_camera_api_set_exposure_mode_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_exposure_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposurePoint%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_point_channel = fl_basic_message_channel_new(messenger, set_exposure_point_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_point_channel, camera_linux_camera_api_set_exposure_point_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_lens_position_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setLensPosition%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_lens_position_channel = fl_basic_message_channel_new(messenger, set_lens_position_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_lens_position_channel, camera_linux_camera_api_set_lens_position_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* get_min_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinExposureOffset%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_min_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_min_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_min_exposure_offset_channel, camera_linux_camera_api_get_min_exposure_offset_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* get_max_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxExposureOffset%s", 
dot_suffix); + g_autoptr(FlBasicMessageChannel) get_max_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_max_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_max_exposure_offset_channel, camera_linux_camera_api_get_max_exposure_offset_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureOffset%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_offset_channel = fl_basic_message_channel_new(messenger, set_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_offset_channel, camera_linux_camera_api_set_exposure_offset_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_focus_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_focus_mode_channel = fl_basic_message_channel_new(messenger, set_focus_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_focus_mode_channel, camera_linux_camera_api_set_focus_mode_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_focus_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusPoint%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_focus_point_channel = fl_basic_message_channel_new(messenger, set_focus_point_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_focus_point_channel, camera_linux_camera_api_set_focus_point_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* get_min_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinZoomLevel%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_min_zoom_level_channel = fl_basic_message_channel_new(messenger, 
get_min_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_min_zoom_level_channel, camera_linux_camera_api_get_min_zoom_level_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* get_max_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxZoomLevel%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_max_zoom_level_channel = fl_basic_message_channel_new(messenger, get_max_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_max_zoom_level_channel, camera_linux_camera_api_get_max_zoom_level_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setZoomLevel%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_zoom_level_channel = fl_basic_message_channel_new(messenger, set_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_zoom_level_channel, camera_linux_camera_api_set_zoom_level_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* pause_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pausePreview%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) pause_preview_channel = fl_basic_message_channel_new(messenger, pause_preview_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(pause_preview_channel, camera_linux_camera_api_pause_preview_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* resume_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumePreview%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) resume_preview_channel = fl_basic_message_channel_new(messenger, resume_preview_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(resume_preview_channel, camera_linux_camera_api_resume_preview_cb, 
g_object_ref(api_data), g_object_unref); + g_autofree gchar* update_description_while_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.updateDescriptionWhileRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) update_description_while_recording_channel = fl_basic_message_channel_new(messenger, update_description_while_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(update_description_while_recording_channel, camera_linux_camera_api_update_description_while_recording_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_image_file_format_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_image_file_format_channel = fl_basic_message_channel_new(messenger, set_image_file_format_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_image_file_format_channel, camera_linux_camera_api_set_image_file_format_cb, g_object_ref(api_data), g_object_unref); +} + +void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix) { + g_autofree gchar* dot_suffix = suffix != nullptr ? 
g_strdup_printf(".%s", suffix) : g_strdup(""); + + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + g_autofree gchar* get_available_cameras_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCameras%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_available_cameras_channel = fl_basic_message_channel_new(messenger, get_available_cameras_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_available_cameras_channel, nullptr, nullptr, nullptr); + g_autofree gchar* create_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.create%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) create_channel = fl_basic_message_channel_new(messenger, create_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(create_channel, nullptr, nullptr, nullptr); + g_autofree gchar* initialize_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.initialize%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) initialize_channel = fl_basic_message_channel_new(messenger, initialize_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(initialize_channel, nullptr, nullptr, nullptr); + g_autofree gchar* start_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startImageStream%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) start_image_stream_channel = fl_basic_message_channel_new(messenger, start_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(start_image_stream_channel, nullptr, nullptr, nullptr); + g_autofree gchar* stop_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) stop_image_stream_channel = fl_basic_message_channel_new(messenger, stop_image_stream_channel_name, 
FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(stop_image_stream_channel, nullptr, nullptr, nullptr); + g_autofree gchar* received_image_stream_data_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) received_image_stream_data_channel = fl_basic_message_channel_new(messenger, received_image_stream_data_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(received_image_stream_data_channel, nullptr, nullptr, nullptr); + g_autofree gchar* dispose_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.dispose%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) dispose_channel = fl_basic_message_channel_new(messenger, dispose_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(dispose_channel, nullptr, nullptr, nullptr); + g_autofree gchar* lock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.lockCaptureOrientation%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) lock_capture_orientation_channel = fl_basic_message_channel_new(messenger, lock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(lock_capture_orientation_channel, nullptr, nullptr, nullptr); + g_autofree gchar* unlock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.unlockCaptureOrientation%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) unlock_capture_orientation_channel = fl_basic_message_channel_new(messenger, unlock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(unlock_capture_orientation_channel, nullptr, nullptr, nullptr); + g_autofree gchar* take_picture_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.takePicture%s", dot_suffix); + 
g_autoptr(FlBasicMessageChannel) take_picture_channel = fl_basic_message_channel_new(messenger, take_picture_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(take_picture_channel, nullptr, nullptr, nullptr); + g_autofree gchar* prepare_for_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.prepareForVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) prepare_for_video_recording_channel = fl_basic_message_channel_new(messenger, prepare_for_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(prepare_for_video_recording_channel, nullptr, nullptr, nullptr); + g_autofree gchar* start_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) start_video_recording_channel = fl_basic_message_channel_new(messenger, start_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(start_video_recording_channel, nullptr, nullptr, nullptr); + g_autofree gchar* stop_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) stop_video_recording_channel = fl_basic_message_channel_new(messenger, stop_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(stop_video_recording_channel, nullptr, nullptr, nullptr); + g_autofree gchar* pause_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pauseVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) pause_video_recording_channel = fl_basic_message_channel_new(messenger, pause_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(pause_video_recording_channel, nullptr, nullptr, nullptr); + g_autofree 
gchar* resume_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumeVideoRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) resume_video_recording_channel = fl_basic_message_channel_new(messenger, resume_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(resume_video_recording_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_flash_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFlashMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_flash_mode_channel = fl_basic_message_channel_new(messenger, set_flash_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_flash_mode_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_exposure_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_mode_channel = fl_basic_message_channel_new(messenger, set_exposure_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_mode_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_exposure_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposurePoint%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_point_channel = fl_basic_message_channel_new(messenger, set_exposure_point_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_point_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_lens_position_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setLensPosition%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_lens_position_channel = fl_basic_message_channel_new(messenger, set_lens_position_channel_name, FL_MESSAGE_CODEC(codec)); + 
fl_basic_message_channel_set_message_handler(set_lens_position_channel, nullptr, nullptr, nullptr); + g_autofree gchar* get_min_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinExposureOffset%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_min_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_min_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_min_exposure_offset_channel, nullptr, nullptr, nullptr); + g_autofree gchar* get_max_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxExposureOffset%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_max_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_max_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_max_exposure_offset_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureOffset%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_exposure_offset_channel = fl_basic_message_channel_new(messenger, set_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_exposure_offset_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_focus_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_focus_mode_channel = fl_basic_message_channel_new(messenger, set_focus_mode_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_focus_mode_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_focus_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusPoint%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_focus_point_channel = 
fl_basic_message_channel_new(messenger, set_focus_point_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_focus_point_channel, nullptr, nullptr, nullptr); + g_autofree gchar* get_min_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinZoomLevel%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_min_zoom_level_channel = fl_basic_message_channel_new(messenger, get_min_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_min_zoom_level_channel, nullptr, nullptr, nullptr); + g_autofree gchar* get_max_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxZoomLevel%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_max_zoom_level_channel = fl_basic_message_channel_new(messenger, get_max_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_max_zoom_level_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setZoomLevel%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_zoom_level_channel = fl_basic_message_channel_new(messenger, set_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_zoom_level_channel, nullptr, nullptr, nullptr); + g_autofree gchar* pause_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pausePreview%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) pause_preview_channel = fl_basic_message_channel_new(messenger, pause_preview_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(pause_preview_channel, nullptr, nullptr, nullptr); + g_autofree gchar* resume_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumePreview%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) resume_preview_channel 
= fl_basic_message_channel_new(messenger, resume_preview_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(resume_preview_channel, nullptr, nullptr, nullptr); + g_autofree gchar* update_description_while_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.updateDescriptionWhileRecording%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) update_description_while_recording_channel = fl_basic_message_channel_new(messenger, update_description_while_recording_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(update_description_while_recording_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_image_file_format_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_image_file_format_channel = fl_basic_message_channel_new(messenger, set_image_file_format_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_image_file_format_channel, nullptr, nullptr, nullptr); +} + +void camera_linux_camera_api_respond_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value) { + g_autoptr(CameraLinuxCameraApiGetAvailableCamerasResponse) response = camera_linux_camera_api_get_available_cameras_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCameras", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetAvailableCamerasResponse) response = 
camera_linux_camera_api_get_available_cameras_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCameras", error->message); + } +} + +void camera_linux_camera_api_respond_create(CameraLinuxCameraApiResponseHandle* response_handle, int64_t return_value) { + g_autoptr(CameraLinuxCameraApiCreateResponse) response = camera_linux_camera_api_create_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "create", error->message); + } +} + +void camera_linux_camera_api_respond_error_create(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiCreateResponse) response = camera_linux_camera_api_create_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "create", error->message); + } +} + +void camera_linux_camera_api_respond_initialize(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiInitializeResponse) response = camera_linux_camera_api_initialize_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "initialize", error->message); + } +} + +void 
camera_linux_camera_api_respond_error_initialize(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiInitializeResponse) response = camera_linux_camera_api_initialize_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "initialize", error->message); + } +} + +void camera_linux_camera_api_respond_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiStartImageStreamResponse) response = camera_linux_camera_api_start_image_stream_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startImageStream", error->message); + } +} + +void camera_linux_camera_api_respond_error_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiStartImageStreamResponse) response = camera_linux_camera_api_start_image_stream_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startImageStream", error->message); + } +} + +void camera_linux_camera_api_respond_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiStopImageStreamResponse) response = camera_linux_camera_api_stop_image_stream_response_new(); + g_autoptr(GError) error = nullptr; + if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopImageStream", error->message); + } +} + +void camera_linux_camera_api_respond_error_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiStopImageStreamResponse) response = camera_linux_camera_api_stop_image_stream_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopImageStream", error->message); + } +} + +void camera_linux_camera_api_respond_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiReceivedImageStreamDataResponse) response = camera_linux_camera_api_received_image_stream_data_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "receivedImageStreamData", error->message); + } +} + +void camera_linux_camera_api_respond_error_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiReceivedImageStreamDataResponse) response = camera_linux_camera_api_received_image_stream_data_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "receivedImageStreamData", 
error->message); + } +} + +void camera_linux_camera_api_respond_dispose(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiDisposeResponse) response = camera_linux_camera_api_dispose_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "dispose", error->message); + } +} + +void camera_linux_camera_api_respond_error_dispose(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiDisposeResponse) response = camera_linux_camera_api_dispose_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "dispose", error->message); + } +} + +void camera_linux_camera_api_respond_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiLockCaptureOrientationResponse) response = camera_linux_camera_api_lock_capture_orientation_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "lockCaptureOrientation", error->message); + } +} + +void camera_linux_camera_api_respond_error_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiLockCaptureOrientationResponse) response = camera_linux_camera_api_lock_capture_orientation_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; 
+ if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "lockCaptureOrientation", error->message); + } +} + +void camera_linux_camera_api_respond_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiUnlockCaptureOrientationResponse) response = camera_linux_camera_api_unlock_capture_orientation_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "unlockCaptureOrientation", error->message); + } +} + +void camera_linux_camera_api_respond_error_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiUnlockCaptureOrientationResponse) response = camera_linux_camera_api_unlock_capture_orientation_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "unlockCaptureOrientation", error->message); + } +} + +void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value) { + g_autoptr(CameraLinuxCameraApiTakePictureResponse) response = camera_linux_camera_api_take_picture_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "takePicture", error->message); + } +} + +void 
camera_linux_camera_api_respond_error_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiTakePictureResponse) response = camera_linux_camera_api_take_picture_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "takePicture", error->message); + } +} + +void camera_linux_camera_api_respond_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiPrepareForVideoRecordingResponse) response = camera_linux_camera_api_prepare_for_video_recording_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "prepareForVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiPrepareForVideoRecordingResponse) response = camera_linux_camera_api_prepare_for_video_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "prepareForVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiStartVideoRecordingResponse) response = 
camera_linux_camera_api_start_video_recording_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiStartVideoRecordingResponse) response = camera_linux_camera_api_start_video_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value) { + g_autoptr(CameraLinuxCameraApiStopVideoRecordingResponse) response = camera_linux_camera_api_stop_video_recording_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiStopVideoRecordingResponse) response = camera_linux_camera_api_stop_video_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, 
response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiPauseVideoRecordingResponse) response = camera_linux_camera_api_pause_video_recording_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pauseVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiPauseVideoRecordingResponse) response = camera_linux_camera_api_pause_video_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pauseVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiResumeVideoRecordingResponse) response = camera_linux_camera_api_resume_video_recording_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumeVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, 
const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiResumeVideoRecordingResponse) response = camera_linux_camera_api_resume_video_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumeVideoRecording", error->message); + } +} + +void camera_linux_camera_api_respond_set_flash_mode(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetFlashModeResponse) response = camera_linux_camera_api_set_flash_mode_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFlashMode", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_flash_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetFlashModeResponse) response = camera_linux_camera_api_set_flash_mode_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFlashMode", error->message); + } +} + +void camera_linux_camera_api_respond_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetExposureModeResponse) response = camera_linux_camera_api_set_exposure_mode_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + 
g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposureMode", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetExposureModeResponse) response = camera_linux_camera_api_set_exposure_mode_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposureMode", error->message); + } +} + +void camera_linux_camera_api_respond_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetExposurePointResponse) response = camera_linux_camera_api_set_exposure_point_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposurePoint", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetExposurePointResponse) response = camera_linux_camera_api_set_exposure_point_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposurePoint", error->message); + } +} + +void camera_linux_camera_api_respond_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle) { + 
g_autoptr(CameraLinuxCameraApiSetLensPositionResponse) response = camera_linux_camera_api_set_lens_position_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setLensPosition", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetLensPositionResponse) response = camera_linux_camera_api_set_lens_position_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setLensPosition", error->message); + } +} + +void camera_linux_camera_api_respond_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { + g_autoptr(CameraLinuxCameraApiGetMinExposureOffsetResponse) response = camera_linux_camera_api_get_min_exposure_offset_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinExposureOffset", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetMinExposureOffsetResponse) response = camera_linux_camera_api_get_min_exposure_offset_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinExposureOffset", error->message); + } +} + +void camera_linux_camera_api_respond_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { + g_autoptr(CameraLinuxCameraApiGetMaxExposureOffsetResponse) response = camera_linux_camera_api_get_max_exposure_offset_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxExposureOffset", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetMaxExposureOffsetResponse) response = camera_linux_camera_api_get_max_exposure_offset_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxExposureOffset", error->message); + } +} + +void camera_linux_camera_api_respond_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetExposureOffsetResponse) response = camera_linux_camera_api_set_exposure_offset_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposureOffset", error->message); + } +} + +void 
camera_linux_camera_api_respond_error_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetExposureOffsetResponse) response = camera_linux_camera_api_set_exposure_offset_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposureOffset", error->message); + } +} + +void camera_linux_camera_api_respond_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetFocusModeResponse) response = camera_linux_camera_api_set_focus_mode_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusMode", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetFocusModeResponse) response = camera_linux_camera_api_set_focus_mode_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusMode", error->message); + } +} + +void camera_linux_camera_api_respond_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetFocusPointResponse) response = camera_linux_camera_api_set_focus_point_response_new(); + g_autoptr(GError) error = nullptr; + if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusPoint", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetFocusPointResponse) response = camera_linux_camera_api_set_focus_point_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusPoint", error->message); + } +} + +void camera_linux_camera_api_respond_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { + g_autoptr(CameraLinuxCameraApiGetMinZoomLevelResponse) response = camera_linux_camera_api_get_min_zoom_level_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinZoomLevel", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetMinZoomLevelResponse) response = camera_linux_camera_api_get_min_zoom_level_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinZoomLevel", error->message); + } +} + +void 
camera_linux_camera_api_respond_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { + g_autoptr(CameraLinuxCameraApiGetMaxZoomLevelResponse) response = camera_linux_camera_api_get_max_zoom_level_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxZoomLevel", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetMaxZoomLevelResponse) response = camera_linux_camera_api_get_max_zoom_level_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxZoomLevel", error->message); + } +} + +void camera_linux_camera_api_respond_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetZoomLevelResponse) response = camera_linux_camera_api_set_zoom_level_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setZoomLevel", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetZoomLevelResponse) response = camera_linux_camera_api_set_zoom_level_response_new_error(code, message, details); + g_autoptr(GError) error = 
nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setZoomLevel", error->message); + } +} + +void camera_linux_camera_api_respond_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiPausePreviewResponse) response = camera_linux_camera_api_pause_preview_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pausePreview", error->message); + } +} + +void camera_linux_camera_api_respond_error_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiPausePreviewResponse) response = camera_linux_camera_api_pause_preview_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pausePreview", error->message); + } +} + +void camera_linux_camera_api_respond_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiResumePreviewResponse) response = camera_linux_camera_api_resume_preview_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumePreview", error->message); + } +} + +void camera_linux_camera_api_respond_error_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* 
details) { + g_autoptr(CameraLinuxCameraApiResumePreviewResponse) response = camera_linux_camera_api_resume_preview_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumePreview", error->message); + } +} + +void camera_linux_camera_api_respond_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse) response = camera_linux_camera_api_update_description_while_recording_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "updateDescriptionWhileRecording", error->message); + } +} + +void camera_linux_camera_api_respond_error_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse) response = camera_linux_camera_api_update_description_while_recording_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "updateDescriptionWhileRecording", error->message); + } +} + +void camera_linux_camera_api_respond_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetImageFileFormatResponse) response = camera_linux_camera_api_set_image_file_format_response_new(); + g_autoptr(GError) error = nullptr; + if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFileFormat", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetImageFileFormatResponse) response = camera_linux_camera_api_set_image_file_format_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFileFormat", error->message); + } +} + +struct _CameraLinuxCameraGlobalEventApi { + GObject parent_instance; + + FlBinaryMessenger* messenger; + gchar *suffix; +}; + +G_DEFINE_TYPE(CameraLinuxCameraGlobalEventApi, camera_linux_camera_global_event_api, G_TYPE_OBJECT) + +static void camera_linux_camera_global_event_api_dispose(GObject* object) { + CameraLinuxCameraGlobalEventApi* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API(object); + g_clear_object(&self->messenger); + g_clear_pointer(&self->suffix, g_free); + G_OBJECT_CLASS(camera_linux_camera_global_event_api_parent_class)->dispose(object); +} + +static void camera_linux_camera_global_event_api_init(CameraLinuxCameraGlobalEventApi* self) { +} + +static void camera_linux_camera_global_event_api_class_init(CameraLinuxCameraGlobalEventApiClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_global_event_api_dispose; +} + +CameraLinuxCameraGlobalEventApi* camera_linux_camera_global_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix) { + CameraLinuxCameraGlobalEventApi* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API(g_object_new(camera_linux_camera_global_event_api_get_type(), nullptr)); + 
self->messenger = FL_BINARY_MESSENGER(g_object_ref(messenger)); + self->suffix = suffix != nullptr ? g_strdup_printf(".%s", suffix) : g_strdup(""); + return self; +} + +struct _CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse { + GObject parent_instance; + + FlValue* error; +}; + +G_DEFINE_TYPE(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse, camera_linux_camera_global_event_api_device_orientation_changed_response, G_TYPE_OBJECT) + +static void camera_linux_camera_global_event_api_device_orientation_changed_response_dispose(GObject* object) { + CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(object); + g_clear_pointer(&self->error, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_global_event_api_device_orientation_changed_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_global_event_api_device_orientation_changed_response_init(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { +} + +static void camera_linux_camera_global_event_api_device_orientation_changed_response_class_init(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_global_event_api_device_orientation_changed_response_dispose; +} + +static CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* camera_linux_camera_global_event_api_device_orientation_changed_response_new(FlValue* response) { + CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(g_object_new(camera_linux_camera_global_event_api_device_orientation_changed_response_get_type(), nullptr)); + if (fl_value_get_length(response) > 1) { + self->error = fl_value_ref(response); + } + return self; +} + +gboolean 
camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), FALSE); + return self->error != nullptr; +} + +const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_code(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 0)); +} + +const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_message(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 1)); +} + +FlValue* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_details(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(self)); + return fl_value_get_list_value(self->error, 2); +} + +static void camera_linux_camera_global_event_api_device_orientation_changed_cb(GObject* object, GAsyncResult* result, gpointer user_data) { + GTask* task = G_TASK(user_data); + g_task_return_pointer(task, result, g_object_unref); +} + +void 
camera_linux_camera_global_event_api_device_orientation_changed(CameraLinuxCameraGlobalEventApi* self, CameraLinuxPlatformDeviceOrientation orientation, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { + g_autoptr(FlValue) args = fl_value_new_list(); + fl_value_append_take(args, fl_value_new_custom(130, fl_value_new_int(orientation), (GDestroyNotify)fl_value_unref)); + g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged%s", self->suffix); + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); + GTask* task = g_task_new(self, cancellable, callback, user_data); + g_task_set_task_data(task, channel, g_object_unref); + fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_global_event_api_device_orientation_changed_cb, task); +} + +CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* camera_linux_camera_global_event_api_device_orientation_changed_finish(CameraLinuxCameraGlobalEventApi* self, GAsyncResult* result, GError** error) { + g_autoptr(GTask) task = G_TASK(result); + GAsyncResult* r = G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); + FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); + g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); + if (response == nullptr) { + return nullptr; + } + return camera_linux_camera_global_event_api_device_orientation_changed_response_new(response); +} + +struct _CameraLinuxCameraEventApi { + GObject parent_instance; + + FlBinaryMessenger* messenger; + gchar *suffix; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApi, camera_linux_camera_event_api, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_dispose(GObject* object) { + CameraLinuxCameraEventApi* self = 
CAMERA_LINUX_CAMERA_EVENT_API(object); + g_clear_object(&self->messenger); + g_clear_pointer(&self->suffix, g_free); + G_OBJECT_CLASS(camera_linux_camera_event_api_parent_class)->dispose(object); +} + +static void camera_linux_camera_event_api_init(CameraLinuxCameraEventApi* self) { +} + +static void camera_linux_camera_event_api_class_init(CameraLinuxCameraEventApiClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_dispose; +} + +CameraLinuxCameraEventApi* camera_linux_camera_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix) { + CameraLinuxCameraEventApi* self = CAMERA_LINUX_CAMERA_EVENT_API(g_object_new(camera_linux_camera_event_api_get_type(), nullptr)); + self->messenger = FL_BINARY_MESSENGER(g_object_ref(messenger)); + self->suffix = suffix != nullptr ? g_strdup_printf(".%s", suffix) : g_strdup(""); + return self; +} + +struct _CameraLinuxCameraEventApiInitializedResponse { + GObject parent_instance; + + FlValue* error; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApiInitializedResponse, camera_linux_camera_event_api_initialized_response, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_initialized_response_dispose(GObject* object) { + CameraLinuxCameraEventApiInitializedResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_INITIALIZED_RESPONSE(object); + g_clear_pointer(&self->error, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_event_api_initialized_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_event_api_initialized_response_init(CameraLinuxCameraEventApiInitializedResponse* self) { +} + +static void camera_linux_camera_event_api_initialized_response_class_init(CameraLinuxCameraEventApiInitializedResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_initialized_response_dispose; +} + +static CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_initialized_response_new(FlValue* response) { + 
CameraLinuxCameraEventApiInitializedResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_INITIALIZED_RESPONSE(g_object_new(camera_linux_camera_event_api_initialized_response_get_type(), nullptr)); + if (fl_value_get_length(response) > 1) { + self->error = fl_value_ref(response); + } + return self; +} + +gboolean camera_linux_camera_event_api_initialized_response_is_error(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), FALSE); + return self->error != nullptr; +} + +const gchar* camera_linux_camera_event_api_initialized_response_get_error_code(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_initialized_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 0)); +} + +const gchar* camera_linux_camera_event_api_initialized_response_get_error_message(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_initialized_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 1)); +} + +FlValue* camera_linux_camera_event_api_initialized_response_get_error_details(CameraLinuxCameraEventApiInitializedResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_INITIALIZED_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_initialized_response_is_error(self)); + return fl_value_get_list_value(self->error, 2); +} + +static void camera_linux_camera_event_api_initialized_cb(GObject* object, GAsyncResult* result, gpointer user_data) { + GTask* task = G_TASK(user_data); + g_task_return_pointer(task, result, g_object_unref); +} + +void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* self, 
CameraLinuxPlatformCameraState* initial_state, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { + g_autoptr(FlValue) args = fl_value_new_list(); + fl_value_append_take(args, fl_value_new_custom_object(139, G_OBJECT(initial_state))); + g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.initialized%s", self->suffix); + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); + GTask* task = g_task_new(self, cancellable, callback, user_data); + g_task_set_task_data(task, channel, g_object_unref); + fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_event_api_initialized_cb, task); +} + +CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_initialized_finish(CameraLinuxCameraEventApi* self, GAsyncResult* result, GError** error) { + g_autoptr(GTask) task = G_TASK(result); + GAsyncResult* r = G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); + FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); + g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); + if (response == nullptr) { + return nullptr; + } + return camera_linux_camera_event_api_initialized_response_new(response); +} + +struct _CameraLinuxCameraEventApiErrorResponse { + GObject parent_instance; + + FlValue* error; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApiErrorResponse, camera_linux_camera_event_api_error_response, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_error_response_dispose(GObject* object) { + CameraLinuxCameraEventApiErrorResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_ERROR_RESPONSE(object); + g_clear_pointer(&self->error, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_event_api_error_response_parent_class)->dispose(object); +} + +static 
void camera_linux_camera_event_api_error_response_init(CameraLinuxCameraEventApiErrorResponse* self) { +} + +static void camera_linux_camera_event_api_error_response_class_init(CameraLinuxCameraEventApiErrorResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_error_response_dispose; +} + +static CameraLinuxCameraEventApiErrorResponse* camera_linux_camera_event_api_error_response_new(FlValue* response) { + CameraLinuxCameraEventApiErrorResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_ERROR_RESPONSE(g_object_new(camera_linux_camera_event_api_error_response_get_type(), nullptr)); + if (fl_value_get_length(response) > 1) { + self->error = fl_value_ref(response); + } + return self; +} + +gboolean camera_linux_camera_event_api_error_response_is_error(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), FALSE); + return self->error != nullptr; +} + +const gchar* camera_linux_camera_event_api_error_response_get_error_code(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_error_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 0)); +} + +const gchar* camera_linux_camera_event_api_error_response_get_error_message(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_error_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 1)); +} + +FlValue* camera_linux_camera_event_api_error_response_get_error_details(CameraLinuxCameraEventApiErrorResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_ERROR_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_error_response_is_error(self)); + return 
fl_value_get_list_value(self->error, 2); +} + +static void camera_linux_camera_event_api_error_cb(GObject* object, GAsyncResult* result, gpointer user_data) { + GTask* task = G_TASK(user_data); + g_task_return_pointer(task, result, g_object_unref); +} + +void camera_linux_camera_event_api_error(CameraLinuxCameraEventApi* self, const gchar* message, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { + g_autoptr(FlValue) args = fl_value_new_list(); + fl_value_append_take(args, fl_value_new_string(message)); + g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.error%s", self->suffix); + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); + GTask* task = g_task_new(self, cancellable, callback, user_data); + g_task_set_task_data(task, channel, g_object_unref); + fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_event_api_error_cb, task); +} + +CameraLinuxCameraEventApiErrorResponse* camera_linux_camera_event_api_error_finish(CameraLinuxCameraEventApi* self, GAsyncResult* result, GError** error) { + g_autoptr(GTask) task = G_TASK(result); + GAsyncResult* r = G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); + FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); + g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); + if (response == nullptr) { + return nullptr; + } + return camera_linux_camera_event_api_error_response_new(response); +} diff --git a/packages/camera/camera_linux/linux/messages.g.h b/packages/camera/camera_linux/linux/messages.g.h new file mode 100644 index 000000000000..0b30fca0b352 --- /dev/null +++ b/packages/camera/camera_linux/linux/messages.g.h @@ -0,0 +1,1288 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// Autogenerated from Pigeon (v22.7.4), do not edit directly. +// See also: https://pub.dev/packages/pigeon + +#ifndef PIGEON_MESSAGES_G_H_ +#define PIGEON_MESSAGES_G_H_ + +#include + +G_BEGIN_DECLS + +/** + * CameraLinuxPlatformCameraLensDirection: + * CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_FRONT: + * Front facing camera (a user looking at the screen is seen by the camera). + * CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_BACK: + * Back facing camera (a user looking at the screen is not seen by the camera). + * CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_EXTERNAL: + * External camera which may not be mounted to the device. + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_FRONT = 0, + CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_BACK = 1, + CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_EXTERNAL = 2 +} CameraLinuxPlatformCameraLensDirection; + +/** + * CameraLinuxPlatformDeviceOrientation: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_UP: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_LEFT: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_DOWN: + * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_RIGHT: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_UP = 0, + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_LEFT = 1, + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_DOWN = 2, + CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_LANDSCAPE_RIGHT = 3 +} CameraLinuxPlatformDeviceOrientation; + +/** + * CameraLinuxPlatformExposureMode: + * CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO: + * CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO = 0, + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED = 1 +} CameraLinuxPlatformExposureMode; + +/** + * CameraLinuxPlatformFlashMode: + * CAMERA_LINUX_PLATFORM_FLASH_MODE_OFF: + * 
CAMERA_LINUX_PLATFORM_FLASH_MODE_AUTO: + * CAMERA_LINUX_PLATFORM_FLASH_MODE_ALWAYS: + * CAMERA_LINUX_PLATFORM_FLASH_MODE_TORCH: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_FLASH_MODE_OFF = 0, + CAMERA_LINUX_PLATFORM_FLASH_MODE_AUTO = 1, + CAMERA_LINUX_PLATFORM_FLASH_MODE_ALWAYS = 2, + CAMERA_LINUX_PLATFORM_FLASH_MODE_TORCH = 3 +} CameraLinuxPlatformFlashMode; + +/** + * CameraLinuxPlatformFocusMode: + * CAMERA_LINUX_PLATFORM_FOCUS_MODE_AUTO: + * CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_FOCUS_MODE_AUTO = 0, + CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED = 1 +} CameraLinuxPlatformFocusMode; + +/** + * CameraLinuxPlatformImageFileFormat: + * CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_JPEG: + * CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_HEIF: + * + * Pigeon version of ImageFileFormat. + */ +typedef enum { + CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_JPEG = 0, + CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_HEIF = 1 +} CameraLinuxPlatformImageFileFormat; + +/** + * CameraLinuxPlatformImageFormatGroup: + * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_BGRA8888: + * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_YUV420: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_BGRA8888 = 0, + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_YUV420 = 1 +} CameraLinuxPlatformImageFormatGroup; + +/** + * CameraLinuxPlatformResolutionPreset: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW = 0, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM = 1, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH = 2, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH = 3, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH = 4, + 
CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX = 5 +} CameraLinuxPlatformResolutionPreset; + +/** + * CameraLinuxPlatformSize: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformSize, camera_linux_platform_size, CAMERA_LINUX, PLATFORM_SIZE, GObject) + +/** + * camera_linux_platform_size_new: + * width: field in this object. + * height: field in this object. + * + * Creates a new #PlatformSize object. + * + * Returns: a new #CameraLinuxPlatformSize + */ +CameraLinuxPlatformSize* camera_linux_platform_size_new(double width, double height); + +/** + * camera_linux_platform_size_get_width + * @object: a #CameraLinuxPlatformSize. + * + * Gets the value of the width field of @object. + * + * Returns: the field value. + */ +double camera_linux_platform_size_get_width(CameraLinuxPlatformSize* object); + +/** + * camera_linux_platform_size_get_height + * @object: a #CameraLinuxPlatformSize. + * + * Gets the value of the height field of @object. + * + * Returns: the field value. + */ +double camera_linux_platform_size_get_height(CameraLinuxPlatformSize* object); + +/** + * CameraLinuxPlatformCameraDescription: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformCameraDescription, camera_linux_platform_camera_description, CAMERA_LINUX, PLATFORM_CAMERA_DESCRIPTION, GObject) + +/** + * camera_linux_platform_camera_description_new: + * name: field in this object. + * lens_direction: field in this object. + * + * Creates a new #PlatformCameraDescription object. + * + * Returns: a new #CameraLinuxPlatformCameraDescription + */ +CameraLinuxPlatformCameraDescription* camera_linux_platform_camera_description_new(const gchar* name, CameraLinuxPlatformCameraLensDirection lens_direction); + +/** + * camera_linux_platform_camera_description_get_name + * @object: a #CameraLinuxPlatformCameraDescription. + * + * The name of the camera device. + * + * Returns: the field value. 
+ */ +const gchar* camera_linux_platform_camera_description_get_name(CameraLinuxPlatformCameraDescription* object); + +/** + * camera_linux_platform_camera_description_get_lens_direction + * @object: a #CameraLinuxPlatformCameraDescription. + * + * The direction the camera is facing. + * + * Returns: the field value. + */ +CameraLinuxPlatformCameraLensDirection camera_linux_platform_camera_description_get_lens_direction(CameraLinuxPlatformCameraDescription* object); + +/** + * CameraLinuxPlatformCameraState: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformCameraState, camera_linux_platform_camera_state, CAMERA_LINUX, PLATFORM_CAMERA_STATE, GObject) + +/** + * camera_linux_platform_camera_state_new: + * preview_size: field in this object. + * exposure_mode: field in this object. + * focus_mode: field in this object. + * exposure_point_supported: field in this object. + * focus_point_supported: field in this object. + * + * Creates a new #PlatformCameraState object. + * + * Returns: a new #CameraLinuxPlatformCameraState + */ +CameraLinuxPlatformCameraState* camera_linux_platform_camera_state_new(CameraLinuxPlatformSize* preview_size, CameraLinuxPlatformExposureMode exposure_mode, CameraLinuxPlatformFocusMode focus_mode, gboolean exposure_point_supported, gboolean focus_point_supported); + +/** + * camera_linux_platform_camera_state_get_preview_size + * @object: a #CameraLinuxPlatformCameraState. + * + * The size of the preview, in pixels. + * + * Returns: the field value. + */ +CameraLinuxPlatformSize* camera_linux_platform_camera_state_get_preview_size(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_exposure_mode + * @object: a #CameraLinuxPlatformCameraState. + * + * The default exposure mode + * + * Returns: the field value. 
+ */ +CameraLinuxPlatformExposureMode camera_linux_platform_camera_state_get_exposure_mode(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_focus_mode + * @object: a #CameraLinuxPlatformCameraState. + * + * The default focus mode + * + * Returns: the field value. + */ +CameraLinuxPlatformFocusMode camera_linux_platform_camera_state_get_focus_mode(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_exposure_point_supported + * @object: a #CameraLinuxPlatformCameraState. + * + * Whether setting exposure points is supported. + * + * Returns: the field value. + */ +gboolean camera_linux_platform_camera_state_get_exposure_point_supported(CameraLinuxPlatformCameraState* object); + +/** + * camera_linux_platform_camera_state_get_focus_point_supported + * @object: a #CameraLinuxPlatformCameraState. + * + * Whether setting focus points is supported. + * + * Returns: the field value. + */ +gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinuxPlatformCameraState* object); + +/** + * CameraLinuxPlatformMediaSettings: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformMediaSettings, camera_linux_platform_media_settings, CAMERA_LINUX, PLATFORM_MEDIA_SETTINGS, GObject) + +/** + * camera_linux_platform_media_settings_new: + * resolution_preset: field in this object. + * frames_per_second: field in this object. + * video_bitrate: field in this object. + * audio_bitrate: field in this object. + * enable_audio: field in this object. + * + * Creates a new #PlatformMediaSettings object. 
+ * + * Returns: a new #CameraLinuxPlatformMediaSettings + */ +CameraLinuxPlatformMediaSettings* camera_linux_platform_media_settings_new(CameraLinuxPlatformResolutionPreset resolution_preset, int64_t* frames_per_second, int64_t* video_bitrate, int64_t* audio_bitrate, gboolean enable_audio); + +/** + * camera_linux_platform_media_settings_get_resolution_preset + * @object: a #CameraLinuxPlatformMediaSettings. + * + * Gets the value of the resolutionPreset field of @object. + * + * Returns: the field value. + */ +CameraLinuxPlatformResolutionPreset camera_linux_platform_media_settings_get_resolution_preset(CameraLinuxPlatformMediaSettings* object); + +/** + * camera_linux_platform_media_settings_get_frames_per_second + * @object: a #CameraLinuxPlatformMediaSettings. + * + * Gets the value of the framesPerSecond field of @object. + * + * Returns: the field value. + */ +int64_t* camera_linux_platform_media_settings_get_frames_per_second(CameraLinuxPlatformMediaSettings* object); + +/** + * camera_linux_platform_media_settings_get_video_bitrate + * @object: a #CameraLinuxPlatformMediaSettings. + * + * Gets the value of the videoBitrate field of @object. + * + * Returns: the field value. + */ +int64_t* camera_linux_platform_media_settings_get_video_bitrate(CameraLinuxPlatformMediaSettings* object); + +/** + * camera_linux_platform_media_settings_get_audio_bitrate + * @object: a #CameraLinuxPlatformMediaSettings. + * + * Gets the value of the audioBitrate field of @object. + * + * Returns: the field value. + */ +int64_t* camera_linux_platform_media_settings_get_audio_bitrate(CameraLinuxPlatformMediaSettings* object); + +/** + * camera_linux_platform_media_settings_get_enable_audio + * @object: a #CameraLinuxPlatformMediaSettings. + * + * Gets the value of the enableAudio field of @object. + * + * Returns: the field value. 
+ */ +gboolean camera_linux_platform_media_settings_get_enable_audio(CameraLinuxPlatformMediaSettings* object); + +/** + * CameraLinuxPlatformPoint: + * + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxPlatformPoint, camera_linux_platform_point, CAMERA_LINUX, PLATFORM_POINT, GObject) + +/** + * camera_linux_platform_point_new: + * x: field in this object. + * y: field in this object. + * + * Creates a new #PlatformPoint object. + * + * Returns: a new #CameraLinuxPlatformPoint + */ +CameraLinuxPlatformPoint* camera_linux_platform_point_new(double x, double y); + +/** + * camera_linux_platform_point_get_x + * @object: a #CameraLinuxPlatformPoint. + * + * Gets the value of the x field of @object. + * + * Returns: the field value. + */ +double camera_linux_platform_point_get_x(CameraLinuxPlatformPoint* object); + +/** + * camera_linux_platform_point_get_y + * @object: a #CameraLinuxPlatformPoint. + * + * Gets the value of the y field of @object. + * + * Returns: the field value. + */ +double camera_linux_platform_point_get_y(CameraLinuxPlatformPoint* object); + +G_DECLARE_FINAL_TYPE(CameraLinuxMessageCodec, camera_linux_message_codec, CAMERA_LINUX, MESSAGE_CODEC, FlStandardMessageCodec) + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApi, camera_linux_camera_api, CAMERA_LINUX, CAMERA_API, GObject) + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResponseHandle, camera_linux_camera_api_response_handle, CAMERA_LINUX, CAMERA_API_RESPONSE_HANDLE, GObject) + +/** + * CameraLinuxCameraApiVTable: + * + * Table of functions exposed by CameraApi to be implemented by the API provider. 
+ */ +typedef struct { + void (*get_available_cameras)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*create)(const gchar* camera_name, CameraLinuxPlatformMediaSettings* settings, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*initialize)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*start_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*stop_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*received_image_stream_data)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*dispose)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*lock_capture_orientation)(CameraLinuxPlatformDeviceOrientation orientation, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*unlock_capture_orientation)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*take_picture)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*prepare_for_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*start_video_recording)(gboolean enable_stream, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*stop_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*pause_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*resume_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_flash_mode)(CameraLinuxPlatformFlashMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void 
(*set_exposure_mode)(CameraLinuxPlatformExposureMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_exposure_point)(CameraLinuxPlatformPoint* point, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_lens_position)(double position, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*get_min_exposure_offset)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*get_max_exposure_offset)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_exposure_offset)(double offset, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_focus_mode)(CameraLinuxPlatformFocusMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_focus_point)(CameraLinuxPlatformPoint* point, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*get_min_zoom_level)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*get_max_zoom_level)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_zoom_level)(double zoom, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*pause_preview)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*resume_preview)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*update_description_while_recording)(const gchar* camera_name, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_image_file_format)(CameraLinuxPlatformImageFileFormat format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); +} CameraLinuxCameraApiVTable; + +/** + * camera_linux_camera_api_set_method_handlers: + * + * @messenger: an #FlBinaryMessenger. 
+ * @suffix: (allow-none): a suffix to add to the API or %NULL for none. + * @vtable: implementations of the methods in this API. + * @user_data: (closure): user data to pass to the functions in @vtable. + * @user_data_free_func: (allow-none): a function which gets called to free @user_data, or %NULL. + * + * Connects the method handlers in the CameraApi API. + */ +void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix, const CameraLinuxCameraApiVTable* vtable, gpointer user_data, GDestroyNotify user_data_free_func); + +/** + * camera_linux_camera_api_clear_method_handlers: + * + * @messenger: an #FlBinaryMessenger. + * @suffix: (allow-none): a suffix to add to the API or %NULL for none. + * + * Clears the method handlers in the CameraApi API. + */ +void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix); + +/** + * camera_linux_camera_api_respond_get_available_cameras: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getAvailableCameras. + */ +void camera_linux_camera_api_respond_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value); + +/** + * camera_linux_camera_api_respond_error_get_available_cameras: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getAvailableCameras. + */ +void camera_linux_camera_api_respond_error_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_create: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. 
+ * + * Responds to CameraApi.create. + */ +void camera_linux_camera_api_respond_create(CameraLinuxCameraApiResponseHandle* response_handle, int64_t return_value); + +/** + * camera_linux_camera_api_respond_error_create: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.create. + */ +void camera_linux_camera_api_respond_error_create(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_initialize: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.initialize. + */ +void camera_linux_camera_api_respond_initialize(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_initialize: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.initialize. + */ +void camera_linux_camera_api_respond_error_initialize(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_start_image_stream: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.startImageStream. + */ +void camera_linux_camera_api_respond_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_start_image_stream: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.startImageStream. 
+ */ +void camera_linux_camera_api_respond_error_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_stop_image_stream: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.stopImageStream. + */ +void camera_linux_camera_api_respond_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_stop_image_stream: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.stopImageStream. + */ +void camera_linux_camera_api_respond_error_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_received_image_stream_data: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.receivedImageStreamData. + */ +void camera_linux_camera_api_respond_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_received_image_stream_data: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.receivedImageStreamData. + */ +void camera_linux_camera_api_respond_error_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_dispose: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.dispose. 
+ */ +void camera_linux_camera_api_respond_dispose(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_dispose: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.dispose. + */ +void camera_linux_camera_api_respond_error_dispose(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_lock_capture_orientation: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.lockCaptureOrientation. + */ +void camera_linux_camera_api_respond_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_lock_capture_orientation: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.lockCaptureOrientation. + */ +void camera_linux_camera_api_respond_error_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_unlock_capture_orientation: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.unlockCaptureOrientation. + */ +void camera_linux_camera_api_respond_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_unlock_capture_orientation: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.unlockCaptureOrientation. 
+ */ +void camera_linux_camera_api_respond_error_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_take_picture: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.takePicture. + */ +void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value); + +/** + * camera_linux_camera_api_respond_error_take_picture: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.takePicture. + */ +void camera_linux_camera_api_respond_error_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_prepare_for_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.prepareForVideoRecording. + */ +void camera_linux_camera_api_respond_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_prepare_for_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.prepareForVideoRecording. + */ +void camera_linux_camera_api_respond_error_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_start_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. 
+ * + * Responds to CameraApi.startVideoRecording. + */ +void camera_linux_camera_api_respond_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_start_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.startVideoRecording. + */ +void camera_linux_camera_api_respond_error_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_stop_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.stopVideoRecording. + */ +void camera_linux_camera_api_respond_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value); + +/** + * camera_linux_camera_api_respond_error_stop_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.stopVideoRecording. + */ +void camera_linux_camera_api_respond_error_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_pause_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.pauseVideoRecording. + */ +void camera_linux_camera_api_respond_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_pause_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. 
+ * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.pauseVideoRecording. + */ +void camera_linux_camera_api_respond_error_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_resume_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.resumeVideoRecording. + */ +void camera_linux_camera_api_respond_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_resume_video_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.resumeVideoRecording. + */ +void camera_linux_camera_api_respond_error_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_flash_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setFlashMode. + */ +void camera_linux_camera_api_respond_set_flash_mode(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_flash_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setFlashMode. + */ +void camera_linux_camera_api_respond_error_set_flash_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_exposure_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. 
+ * + * Responds to CameraApi.setExposureMode. + */ +void camera_linux_camera_api_respond_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_exposure_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setExposureMode. + */ +void camera_linux_camera_api_respond_error_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_exposure_point: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setExposurePoint. + */ +void camera_linux_camera_api_respond_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_exposure_point: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setExposurePoint. + */ +void camera_linux_camera_api_respond_error_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_lens_position: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setLensPosition. + */ +void camera_linux_camera_api_respond_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_lens_position: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setLensPosition. 
+ */ +void camera_linux_camera_api_respond_error_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_get_min_exposure_offset: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getMinExposureOffset. + */ +void camera_linux_camera_api_respond_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); + +/** + * camera_linux_camera_api_respond_error_get_min_exposure_offset: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getMinExposureOffset. + */ +void camera_linux_camera_api_respond_error_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_get_max_exposure_offset: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getMaxExposureOffset. + */ +void camera_linux_camera_api_respond_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); + +/** + * camera_linux_camera_api_respond_error_get_max_exposure_offset: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getMaxExposureOffset. 
+ */ +void camera_linux_camera_api_respond_error_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_exposure_offset: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setExposureOffset. + */ +void camera_linux_camera_api_respond_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_exposure_offset: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setExposureOffset. + */ +void camera_linux_camera_api_respond_error_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_focus_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setFocusMode. + */ +void camera_linux_camera_api_respond_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_focus_mode: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setFocusMode. + */ +void camera_linux_camera_api_respond_error_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_focus_point: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setFocusPoint. 
+ */ +void camera_linux_camera_api_respond_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_focus_point: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setFocusPoint. + */ +void camera_linux_camera_api_respond_error_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_get_min_zoom_level: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getMinZoomLevel. + */ +void camera_linux_camera_api_respond_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); + +/** + * camera_linux_camera_api_respond_error_get_min_zoom_level: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getMinZoomLevel. + */ +void camera_linux_camera_api_respond_error_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_get_max_zoom_level: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getMaxZoomLevel. + */ +void camera_linux_camera_api_respond_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); + +/** + * camera_linux_camera_api_respond_error_get_max_zoom_level: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. 
+ * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getMaxZoomLevel. + */ +void camera_linux_camera_api_respond_error_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_zoom_level: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setZoomLevel. + */ +void camera_linux_camera_api_respond_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_zoom_level: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setZoomLevel. + */ +void camera_linux_camera_api_respond_error_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_pause_preview: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.pausePreview. + */ +void camera_linux_camera_api_respond_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_pause_preview: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.pausePreview. + */ +void camera_linux_camera_api_respond_error_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_resume_preview: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.resumePreview. 
+ */ +void camera_linux_camera_api_respond_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_resume_preview: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.resumePreview. + */ +void camera_linux_camera_api_respond_error_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_update_description_while_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.updateDescriptionWhileRecording. + */ +void camera_linux_camera_api_respond_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_update_description_while_recording: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.updateDescriptionWhileRecording. + */ +void camera_linux_camera_api_respond_error_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +/** + * camera_linux_camera_api_respond_set_image_file_format: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setImageFileFormat. + */ +void camera_linux_camera_api_respond_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_image_file_format: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. 
+ * + * Responds with an error to CameraApi.setImageFileFormat. + */ +void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse, camera_linux_camera_global_event_api_device_orientation_changed_response, CAMERA_LINUX, CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE, GObject) + +/** + * camera_linux_camera_global_event_api_device_orientation_changed_response_is_error: + * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. + * + * Checks if a response to CameraGlobalEventApi.deviceOrientationChanged is an error. + * + * Returns: a %TRUE if this response is an error. + */ +gboolean camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); + +/** + * camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_code: + * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. + * + * Get the error code for this response. + * + * Returns: an error code or %NULL if not an error. + */ +const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_code(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); + +/** + * camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_message: + * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. + * + * Get the error message for this response. + * + * Returns: an error message. 
+ */ +const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_message(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); + +/** + * camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_details: + * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. + * + * Get the error details for this response. + * + * Returns: (allow-none): an error details or %NULL. + */ +FlValue* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_details(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); + +/** + * CameraLinuxCameraGlobalEventApi: + * + * Handler for native callbacks that are not tied to a specific camera ID. + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraGlobalEventApi, camera_linux_camera_global_event_api, CAMERA_LINUX, CAMERA_GLOBAL_EVENT_API, GObject) + +/** + * camera_linux_camera_global_event_api_new: + * @messenger: an #FlBinaryMessenger. + * @suffix: (allow-none): a suffix to add to the API or %NULL for none. + * + * Creates a new object to access the CameraGlobalEventApi API. + * + * Returns: a new #CameraLinuxCameraGlobalEventApi + */ +CameraLinuxCameraGlobalEventApi* camera_linux_camera_global_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix); + +/** + * camera_linux_camera_global_event_api_device_orientation_changed: + * @api: a #CameraLinuxCameraGlobalEventApi. + * @orientation: parameter for this method. + * @cancellable: (allow-none): a #GCancellable or %NULL. + * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response. + * @user_data: (closure): user data to pass to @callback. + * + * Called when the device's physical orientation changes. 
+ */ +void camera_linux_camera_global_event_api_device_orientation_changed(CameraLinuxCameraGlobalEventApi* api, CameraLinuxPlatformDeviceOrientation orientation, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data); + +/** + * camera_linux_camera_global_event_api_device_orientation_changed_finish: + * @api: a #CameraLinuxCameraGlobalEventApi. + * @result: a #GAsyncResult. + * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore. + * + * Completes a camera_linux_camera_global_event_api_device_orientation_changed() call. + * + * Returns: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse or %NULL on error. + */ +CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* camera_linux_camera_global_event_api_device_orientation_changed_finish(CameraLinuxCameraGlobalEventApi* api, GAsyncResult* result, GError** error); + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiInitializedResponse, camera_linux_camera_event_api_initialized_response, CAMERA_LINUX, CAMERA_EVENT_API_INITIALIZED_RESPONSE, GObject) + +/** + * camera_linux_camera_event_api_initialized_response_is_error: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Checks if a response to CameraEventApi.initialized is an error. + * + * Returns: a %TRUE if this response is an error. + */ +gboolean camera_linux_camera_event_api_initialized_response_is_error(CameraLinuxCameraEventApiInitializedResponse* response); + +/** + * camera_linux_camera_event_api_initialized_response_get_error_code: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Get the error code for this response. + * + * Returns: an error code or %NULL if not an error. 
+ */ +const gchar* camera_linux_camera_event_api_initialized_response_get_error_code(CameraLinuxCameraEventApiInitializedResponse* response); + +/** + * camera_linux_camera_event_api_initialized_response_get_error_message: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Get the error message for this response. + * + * Returns: an error message. + */ +const gchar* camera_linux_camera_event_api_initialized_response_get_error_message(CameraLinuxCameraEventApiInitializedResponse* response); + +/** + * camera_linux_camera_event_api_initialized_response_get_error_details: + * @response: a #CameraLinuxCameraEventApiInitializedResponse. + * + * Get the error details for this response. + * + * Returns: (allow-none): an error details or %NULL. + */ +FlValue* camera_linux_camera_event_api_initialized_response_get_error_details(CameraLinuxCameraEventApiInitializedResponse* response); + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiErrorResponse, camera_linux_camera_event_api_error_response, CAMERA_LINUX, CAMERA_EVENT_API_ERROR_RESPONSE, GObject) + +/** + * camera_linux_camera_event_api_error_response_is_error: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Checks if a response to CameraEventApi.error is an error. + * + * Returns: a %TRUE if this response is an error. + */ +gboolean camera_linux_camera_event_api_error_response_is_error(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * camera_linux_camera_event_api_error_response_get_error_code: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Get the error code for this response. + * + * Returns: an error code or %NULL if not an error. + */ +const gchar* camera_linux_camera_event_api_error_response_get_error_code(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * camera_linux_camera_event_api_error_response_get_error_message: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Get the error message for this response. 
+ * + * Returns: an error message. + */ +const gchar* camera_linux_camera_event_api_error_response_get_error_message(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * camera_linux_camera_event_api_error_response_get_error_details: + * @response: a #CameraLinuxCameraEventApiErrorResponse. + * + * Get the error details for this response. + * + * Returns: (allow-none): an error details or %NULL. + */ +FlValue* camera_linux_camera_event_api_error_response_get_error_details(CameraLinuxCameraEventApiErrorResponse* response); + +/** + * CameraLinuxCameraEventApi: + * + * Handler for native callbacks that are tied to a specific camera ID. + * + * This is intended to be initialized with the camera ID as a suffix. + */ + +G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApi, camera_linux_camera_event_api, CAMERA_LINUX, CAMERA_EVENT_API, GObject) + +/** + * camera_linux_camera_event_api_new: + * @messenger: an #FlBinaryMessenger. + * @suffix: (allow-none): a suffix to add to the API or %NULL for none. + * + * Creates a new object to access the CameraEventApi API. + * + * Returns: a new #CameraLinuxCameraEventApi + */ +CameraLinuxCameraEventApi* camera_linux_camera_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix); + +/** + * camera_linux_camera_event_api_initialized: + * @api: a #CameraLinuxCameraEventApi. + * @initial_state: parameter for this method. + * @cancellable: (allow-none): a #GCancellable or %NULL. + * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response. + * @user_data: (closure): user data to pass to @callback. + * + * Called when the camera is initialized for use. 
+ */ +void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* api, CameraLinuxPlatformCameraState* initial_state, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data); + +/** + * camera_linux_camera_event_api_initialized_finish: + * @api: a #CameraLinuxCameraEventApi. + * @result: a #GAsyncResult. + * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore. + * + * Completes a camera_linux_camera_event_api_initialized() call. + * + * Returns: a #CameraLinuxCameraEventApiInitializedResponse or %NULL on error. + */ +CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_initialized_finish(CameraLinuxCameraEventApi* api, GAsyncResult* result, GError** error); + +/** + * camera_linux_camera_event_api_error: + * @api: a #CameraLinuxCameraEventApi. + * @message: parameter for this method. + * @cancellable: (allow-none): a #GCancellable or %NULL. + * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response. + * @user_data: (closure): user data to pass to @callback. + * + * Called when an error occurs in the camera. + * + * This should be used for errors that occur outside of the context of + * handling a specific HostApi call, such as during streaming. + */ +void camera_linux_camera_event_api_error(CameraLinuxCameraEventApi* api, const gchar* message, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data); + +/** + * camera_linux_camera_event_api_error_finish: + * @api: a #CameraLinuxCameraEventApi. + * @result: a #GAsyncResult. + * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore. + * + * Completes a camera_linux_camera_event_api_error() call. + * + * Returns: a #CameraLinuxCameraEventApiErrorResponse or %NULL on error. 
+ */ +CameraLinuxCameraEventApiErrorResponse* camera_linux_camera_event_api_error_finish(CameraLinuxCameraEventApi* api, GAsyncResult* result, GError** error); + +G_END_DECLS + +#endif // PIGEON_MESSAGES_G_H_ diff --git a/packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc b/packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc deleted file mode 100644 index a3771a632cb6..000000000000 --- a/packages/camera/camera_linux/linux/test/camera_linux_plugin_test.cc +++ /dev/null @@ -1,31 +0,0 @@ -#include -#include -#include - -#include "include/camera_linux/camera_linux_plugin.h" -#include "camera_linux_plugin_private.h" - -// This demonstrates a simple unit test of the C portion of this plugin's -// implementation. -// -// Once you have built the plugin's example app, you can run these tests -// from the command line. For instance, for a plugin called my_plugin -// built for x64 debug, run: -// $ build/linux/x64/debug/plugins/my_plugin/my_plugin_test - -namespace camera_linux { -namespace test { - -TEST(CameraLinuxPlugin, GetPlatformVersion) { - g_autoptr(FlMethodResponse) response = get_platform_version(); - ASSERT_NE(response, nullptr); - ASSERT_TRUE(FL_IS_METHOD_SUCCESS_RESPONSE(response)); - FlValue* result = fl_method_success_response_get_result( - FL_METHOD_SUCCESS_RESPONSE(response)); - ASSERT_EQ(fl_value_get_type(result), FL_VALUE_TYPE_STRING); - // The full string varies, so just validate that it has the right format. - EXPECT_THAT(fl_value_get_string(result), testing::StartsWith("Linux ")); -} - -} // namespace test -} // namespace camera_linux diff --git a/packages/camera/camera_linux/pigeons/copyright.txt b/packages/camera/camera_linux/pigeons/copyright.txt new file mode 100644 index 000000000000..1236b63caf3a --- /dev/null +++ b/packages/camera/camera_linux/pigeons/copyright.txt @@ -0,0 +1,3 @@ +Copyright 2013 The Flutter Authors. All rights reserved. 
+Use of this source code is governed by a BSD-style license that can be +found in the LICENSE file. diff --git a/packages/camera/camera_linux/pigeons/messages.dart b/packages/camera/camera_linux/pigeons/messages.dart new file mode 100644 index 000000000000..005962b7b156 --- /dev/null +++ b/packages/camera/camera_linux/pigeons/messages.dart @@ -0,0 +1,316 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import 'package:pigeon/pigeon.dart'; + +@ConfigurePigeon(PigeonOptions( + dartOut: 'lib/src/messages.g.dart', + gobjectHeaderOut: 'linux/messages.g.h', + gobjectSourceOut: 'linux/messages.g.cc', + gobjectOptions: GObjectOptions(), + copyrightHeader: 'pigeons/copyright.txt', +)) + +// Pigeon version of CameraLensDirection. +enum PlatformCameraLensDirection { + /// Front facing camera (a user looking at the screen is seen by the camera). + front, + + /// Back facing camera (a user looking at the screen is not seen by the camera). + back, + + /// External camera which may not be mounted to the device. + external, +} + +// Pigeon equivalent of CGSize. +class PlatformSize { + PlatformSize({required this.width, required this.height}); + + final double width; + final double height; +} + +// Pigeon version of DeviceOrientation. +enum PlatformDeviceOrientation { + portraitUp, + landscapeLeft, + portraitDown, + landscapeRight, +} + +// Pigeon version of ExposureMode. +enum PlatformExposureMode { + auto, + locked, +} + +// Pigeon version of FlashMode. +enum PlatformFlashMode { + off, + auto, + always, + torch, +} + +// Pigeon version of FocusMode. +enum PlatformFocusMode { + auto, + locked, +} + +/// Pigeon version of ImageFileFormat. +enum PlatformImageFileFormat { + jpeg, + heif, +} + +// Pigeon version of the subset of ImageFormatGroup supported on iOS. +enum PlatformImageFormatGroup { + bgra8888, + yuv420, +} + +// Pigeon version of ResolutionPreset. 
+enum PlatformResolutionPreset { + low, + medium, + high, + veryHigh, + ultraHigh, + max, +} + +// Pigeon version of CameraDescription. +class PlatformCameraDescription { + PlatformCameraDescription({ + required this.name, + required this.lensDirection, + }); + + /// The name of the camera device. + final String name; + + /// The direction the camera is facing. + final PlatformCameraLensDirection lensDirection; +} + +// Pigeon version of the data needed for a CameraInitializedEvent. +class PlatformCameraState { + PlatformCameraState({ + required this.previewSize, + required this.exposureMode, + required this.focusMode, + required this.exposurePointSupported, + required this.focusPointSupported, + }); + + /// The size of the preview, in pixels. + final PlatformSize previewSize; + + /// The default exposure mode + final PlatformExposureMode exposureMode; + + /// The default focus mode + final PlatformFocusMode focusMode; + + /// Whether setting exposure points is supported. + final bool exposurePointSupported; + + /// Whether setting focus points is supported. + final bool focusPointSupported; +} + +// Pigeon version of MediaSettings. +class PlatformMediaSettings { + PlatformMediaSettings({ + required this.resolutionPreset, + required this.framesPerSecond, + required this.videoBitrate, + required this.audioBitrate, + required this.enableAudio, + }); + + final PlatformResolutionPreset resolutionPreset; + final int? framesPerSecond; + final int? videoBitrate; + final int? audioBitrate; + final bool enableAudio; +} + +// Pigeon equivalent of CGPoint. +class PlatformPoint { + PlatformPoint({required this.x, required this.y}); + + final double x; + final double y; +} + +@HostApi() +abstract class CameraApi { + /// Returns the list of available cameras. + // TODO(stuartmorgan): Make the generic type non-nullable once supported. + // https://github.com/flutter/flutter/issues/97848 + // The consuming code treats it as non-nullable. 
+ @async + List getAvailableCameras(); + + /// Create a new camera with the given settings, and returns its ID. + @async + int create(String cameraName, PlatformMediaSettings settings); + + /// Initializes the camera with the given ID. + @async + void initialize(int cameraId, PlatformImageFormatGroup imageFormat); + + /// Begins streaming frames from the camera. + @async + void startImageStream(); + + /// Stops streaming frames from the camera. + @async + void stopImageStream(); + + /// Called by the Dart side of the plugin when it has received the last image + /// frame sent. + /// + /// This is used to throttle sending frames across the channel. + @async + void receivedImageStreamData(); + + /// Indicates that the given camera is no longer being used on the Dart side, + /// and any associated resources can be cleaned up. + @async + void dispose(int cameraId); + + /// Locks the camera capture to the current device orientation. + @async + void lockCaptureOrientation(PlatformDeviceOrientation orientation); + + /// Unlocks camera capture orientation, allowing it to automatically adapt to + /// device orientation. + @async + void unlockCaptureOrientation(); + + /// Takes a picture with the current settings, and returns the path to the + /// resulting file. + @async + String takePicture(); + + /// Does any preprocessing necessary before beginning to record video. + @async + void prepareForVideoRecording(); + + /// Begins recording video, optionally enabling streaming to Dart at the same + /// time. + @async + void startVideoRecording(bool enableStream); + + /// Stops recording video, and returns the path to the resulting file. + @async + String stopVideoRecording(); + + /// Pauses video recording. + @async + void pauseVideoRecording(); + + /// Resumes a previously paused video recording. + @async + void resumeVideoRecording(); + + /// Switches the camera to the given flash mode. 
+ @async + void setFlashMode(PlatformFlashMode mode); + + /// Switches the camera to the given exposure mode. + @async + void setExposureMode(PlatformExposureMode mode); + + /// Anchors auto-exposure to the given point in (0,1) coordinate space. + /// + /// A null value resets to the default exposure point. + @async + void setExposurePoint(PlatformPoint? point); + + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + @async + void setLensPosition(double position); + + /// Returns the minimum exposure offset supported by the camera. + @async + double getMinExposureOffset(); + + /// Returns the maximum exposure offset supported by the camera. + @async + double getMaxExposureOffset(); + + /// Sets the exposure offset manually to the given value. + @async + void setExposureOffset(double offset); + + /// Switches the camera to the given focus mode. + @async + void setFocusMode(PlatformFocusMode mode); + + /// Anchors auto-focus to the given point in (0,1) coordinate space. + /// + /// A null value resets to the default focus point. + @async + void setFocusPoint(PlatformPoint? point); + + /// Returns the minimum zoom level supported by the camera. + @async + double getMinZoomLevel(); + + /// Returns the maximum zoom level supported by the camera. + @async + double getMaxZoomLevel(); + + /// Sets the zoom factor. + @async + void setZoomLevel(double zoom); + + /// Pauses streaming of preview frames. + @async + void pausePreview(); + + /// Resumes a previously paused preview stream. + @async + void resumePreview(); + + /// Changes the camera used while recording video. + /// + /// This should only be called while video recording is active. + @async + void updateDescriptionWhileRecording(String cameraName); + + /// Sets the file format used for taking pictures. 
+ @async + void setImageFileFormat(PlatformImageFileFormat format); +} + +/// Handler for native callbacks that are not tied to a specific camera ID. +@FlutterApi() +abstract class CameraGlobalEventApi { + /// Called when the device's physical orientation changes. + void deviceOrientationChanged(PlatformDeviceOrientation orientation); +} + +/// Handler for native callbacks that are tied to a specific camera ID. +/// +/// This is intended to be initialized with the camera ID as a suffix. +@FlutterApi() +abstract class CameraEventApi { + /// Called when the camera is initialized for use. + void initialized(PlatformCameraState initialState); + + /// Called when an error occurs in the camera. + /// + /// This should be used for errors that occur outside of the context of + /// handling a specific HostApi call, such as during streaming. + void error(String message); +} diff --git a/packages/camera/camera_linux/pubspec.yaml b/packages/camera/camera_linux/pubspec.yaml index b0a978cdf8b8..896f0a19fb89 100644 --- a/packages/camera/camera_linux/pubspec.yaml +++ b/packages/camera/camera_linux/pubspec.yaml @@ -10,11 +10,13 @@ dependencies: flutter: sdk: flutter plugin_platform_interface: ^2.0.2 + camera_platform_interface: ^2.7.0 dev_dependencies: flutter_test: sdk: flutter flutter_lints: ^4.0.0 + pigeon: ^22.4.2 # For information on the generic Dart part of this file, see the # following page: https://dart.dev/tools/pub/pubspec @@ -34,7 +36,8 @@ flutter: plugin: platforms: linux: - pluginClass: CameraLinuxPlugin + pluginClass: CameraPlugin + dartPluginClass: CameraLinux # To add assets to your plugin package, add an assets section, like this: # assets: diff --git a/packages/camera/camera_linux/test/camera_linux_method_channel_test.dart b/packages/camera/camera_linux/test/camera_linux_method_channel_test.dart deleted file mode 100644 index 7ab5d5289a45..000000000000 --- a/packages/camera/camera_linux/test/camera_linux_method_channel_test.dart +++ /dev/null @@ -1,27 +0,0 @@ 
-import 'package:flutter/services.dart'; -import 'package:flutter_test/flutter_test.dart'; -import 'package:camera_linux/camera_linux_method_channel.dart'; - -void main() { - TestWidgetsFlutterBinding.ensureInitialized(); - - MethodChannelCameraLinux platform = MethodChannelCameraLinux(); - const MethodChannel channel = MethodChannel('camera_linux'); - - setUp(() { - TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger.setMockMethodCallHandler( - channel, - (MethodCall methodCall) async { - return '42'; - }, - ); - }); - - tearDown(() { - TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger.setMockMethodCallHandler(channel, null); - }); - - test('getPlatformVersion', () async { - expect(await platform.getPlatformVersion(), '42'); - }); -} diff --git a/packages/camera/camera_linux/test/camera_linux_test.dart b/packages/camera/camera_linux/test/camera_linux_test.dart deleted file mode 100644 index 6dec7f8cf8c3..000000000000 --- a/packages/camera/camera_linux/test/camera_linux_test.dart +++ /dev/null @@ -1,29 +0,0 @@ -import 'package:flutter_test/flutter_test.dart'; -import 'package:camera_linux/camera_linux.dart'; -import 'package:camera_linux/camera_linux_platform_interface.dart'; -import 'package:camera_linux/camera_linux_method_channel.dart'; -import 'package:plugin_platform_interface/plugin_platform_interface.dart'; - -class MockCameraLinuxPlatform - with MockPlatformInterfaceMixin - implements CameraLinuxPlatform { - - @override - Future getPlatformVersion() => Future.value('42'); -} - -void main() { - final CameraLinuxPlatform initialPlatform = CameraLinuxPlatform.instance; - - test('$MethodChannelCameraLinux is the default instance', () { - expect(initialPlatform, isInstanceOf()); - }); - - test('getPlatformVersion', () async { - CameraLinux cameraLinuxPlugin = CameraLinux(); - MockCameraLinuxPlatform fakePlatform = MockCameraLinuxPlatform(); - CameraLinuxPlatform.instance = fakePlatform; - - expect(await 
cameraLinuxPlugin.getPlatformVersion(), '42'); - }); -} From d4b90d93ada00398c6edc213a2e0e3e3ee42fda6 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Tue, 20 May 2025 22:32:48 -0400 Subject: [PATCH 05/21] Refactor camera_linux API: Remove unused enums and classes, update method signatures - Removed CameraLensDirection and ResolutionPreset enums from messages.g.h and messages.dart. - Eliminated CameraDescription and MediaSettings classes from messages.dart. - Updated CameraApi methods to reflect changes in camera creation and available cameras retrieval. - Added getTextureId method to CameraApi for retrieving texture ID. - Updated pubspec.yaml to include stream_transform dependency and asset for Pylon setup. --- packages/camera/camera/example/.gitattributes | 1 + .../camera_linux/lib/src/linux_camera.dart | 323 +++++++++- .../camera_linux/lib/src/messages.g.dart | 213 ++----- .../camera/camera_linux/linux/CMakeLists.txt | 37 +- .../camera_linux/linux/camera_host_plugin.cpp | 207 +++++++ .../camera_linux/linux/camera_host_plugin.h | 234 +++++++ .../camera_linux/linux/camera_plugin.cpp | 7 +- .../camera_texture_image_event_handler.cpp | 119 ++++ .../camera_texture_image_event_handler.h | 52 ++ .../camera_linux/linux/dma_buffer_factory.cpp | 69 +++ .../camera_linux/linux/dma_buffer_factory.h | 55 ++ .../camera/camera_linux/linux/messages.g.cc | 572 +++++------------- .../camera/camera_linux/linux/messages.g.h | 266 +------- .../camera/camera_linux/pigeons/messages.dart | 71 +-- packages/camera/camera_linux/pubspec.yaml | 4 + 15 files changed, 1322 insertions(+), 908 deletions(-) create mode 100644 packages/camera/camera/example/.gitattributes create mode 100644 packages/camera/camera_linux/linux/camera_host_plugin.cpp create mode 100644 packages/camera/camera_linux/linux/camera_host_plugin.h create mode 100644 packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp create mode 100644 
packages/camera/camera_linux/linux/camera_texture_image_event_handler.h create mode 100644 packages/camera/camera_linux/linux/dma_buffer_factory.cpp create mode 100644 packages/camera/camera_linux/linux/dma_buffer_factory.h diff --git a/packages/camera/camera/example/.gitattributes b/packages/camera/camera/example/.gitattributes new file mode 100644 index 000000000000..f087b429e2f8 --- /dev/null +++ b/packages/camera/camera/example/.gitattributes @@ -0,0 +1 @@ +*.tar.gz filter=lfs diff=lfs merge=lfs -text diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart index 80df997a1150..8cdd7fcde5b9 100644 --- a/packages/camera/camera_linux/lib/src/linux_camera.dart +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -1,26 +1,339 @@ +import 'dart:async'; +import 'dart:math'; + import 'package:camera_linux/src/messages.g.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; -import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; +import 'package:stream_transform/stream_transform.dart'; class CameraLinux extends CameraPlatform { final CameraApi _hostApi; CameraLinux({@visibleForTesting CameraApi? api}) : _hostApi = api ?? CameraApi(); - /// Registers this class as the default instance of [CameraPlatform]. static void registerWith() { - print("registerWith"); CameraPlatform.instance = CameraLinux(); } + /// The controller we need to broadcast the different events coming + /// from handleMethodCall, specific to camera events. + /// + /// It is a `broadcast` because multiple controllers will connect to + /// different stream views of this Controller. + /// This is only exposed for test purposes. It shouldn't be used by clients of + /// the plugin as it may break or change at any time. 
+ @visibleForTesting + final StreamController cameraEventStreamController = StreamController.broadcast(); + + /// The per-camera handlers for messages that should be rebroadcast to + /// clients as [CameraEvent]s. + @visibleForTesting + final Map hostCameraHandlers = {}; + + Stream _cameraEvents(int cameraId) => + cameraEventStreamController.stream.where((CameraEvent event) => event.cameraId == cameraId); + @override Future> availableCameras() async { try { - print("availableCameras"); - return []; //(await _hostApi.getAvailableCameras()).map(cameraDescriptionFromPlatform).toList(); + final camerasNames = await _hostApi.getAvailableCamerasNames(); + return camerasNames.map( + (name) { + return CameraDescription( + name: name, + lensDirection: CameraLensDirection.back, + sensorOrientation: 0, + lensType: CameraLensType.unknown, + ); + }, + ).toList(); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + /// resolutionPreset is not used on Linux. + /// enableAudio is not used on Linux. + @override + Future createCamera( + CameraDescription cameraDescription, + ResolutionPreset? 
resolutionPreset, { + bool enableAudio = false, + }) async { + try { + final cameraId = await _hostApi.create(cameraDescription.name); + return cameraId; } on PlatformException catch (e) { throw CameraException(e.code, e.message); } } + + @override + Future initializeCamera( + int cameraId, { + ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown, + }) async { + hostCameraHandlers.putIfAbsent(cameraId, () => HostCameraMessageHandler(cameraId, cameraEventStreamController)); + final Completer completer = Completer(); + + unawaited( + onCameraInitialized(cameraId).first.then( + (CameraInitializedEvent value) => completer.complete(), + ), + ); + + try { + await _hostApi.initialize(cameraId, _pigeonImageFormat(imageFormatGroup)); + } on PlatformException catch (e, s) { + completer.completeError( + CameraException(e.code, e.message), + s, + ); + } + + return completer.future; + } + + @override + Stream onCameraInitialized(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onCameraResolutionChanged(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onCameraClosing(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onCameraError(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onVideoRecordedEvent(int cameraId) { + return _cameraEvents(cameraId).whereType(); + } + + @override + Stream onDeviceOrientationChanged() { + return const Stream.empty(); + } + + /// The following methods are not implemented for Linux, as they are not + /// supported by the underlying camera API. + @override + Future lockCaptureOrientation(int cameraId, DeviceOrientation orientation) { + return Future.value(); + } + + /// The following methods are not implemented for Linux, as they are not + /// supported by the underlying camera API. 
+ @override + Future unlockCaptureOrientation(int cameraId) { + return Future.value(); + } + + @override + Future takePicture(int cameraId) { + throw UnimplementedError('takePicture() is not implemented.'); + } + + @override + Future prepareForVideoRecording() { + throw UnimplementedError('prepareForVideoRecording() is not implemented.'); + } + + @override + Future startVideoRecording( + int cameraId, { + @Deprecated('This parameter is unused, and will be ignored on all platforms') Duration? maxVideoDuration, + }) { + throw UnimplementedError('startVideoRecording() is not implemented.'); + } + + @override + Future stopVideoRecording(int cameraId) { + throw UnimplementedError('stopVideoRecording() is not implemented.'); + } + + @override + Future pauseVideoRecording(int cameraId) { + throw UnimplementedError('pauseVideoRecording() is not implemented.'); + } + + @override + Future resumeVideoRecording(int cameraId) { + throw UnimplementedError('resumeVideoRecording() is not implemented.'); + } + + @override + Future setFlashMode(int cameraId, FlashMode mode) async {} + + @override + Future setExposureMode(int cameraId, ExposureMode mode) async {} + + @override + Future setExposurePoint(int cameraId, Point? point) async {} + + @override + Future getMinExposureOffset(int cameraId) async { + return 0.0; + } + + @override + Future getMaxExposureOffset(int cameraId) async { + return 0.0; + } + + @override + Future getExposureOffsetStepSize(int cameraId) async { + return 0.0; + } + + @override + Future setExposureOffset(int cameraId, double offset) async { + return 0.0; + } + + @override + Future setFocusMode(int cameraId, FocusMode mode) async {} + + @override + Future setFocusPoint(int cameraId, Point? 
point) async {} + + @override + Future getMaxZoomLevel(int cameraId) async { + return 1.0; + } + + @override + Future getMinZoomLevel(int cameraId) async { + return 0.0; + } + + @override + Future setZoomLevel(int cameraId, double zoom) async {} + + @override + Future pausePreview(int cameraId) async {} + + @override + Future resumePreview(int cameraId) async {} + + @override + Future setDescriptionWhileRecording(CameraDescription description) async {} + + @override + Widget buildPreview(int cameraId) { + return FutureBuilder( + future: _hostApi.getTextureId(cameraId), + builder: (context, snapshot) { + if (snapshot.connectionState == ConnectionState.done && snapshot.data != null) { + print('Texture ID from dart: ${snapshot.data}'); + return RepaintBoundary( + child: Texture( + textureId: snapshot.data!, + filterQuality: FilterQuality.none, + ), + ); + } else { + return const Center(child: CircularProgressIndicator()); + } + }, + ); + } + + @override + Future dispose(int cameraId) async {} + + @override + Future setImageFileFormat(int cameraId, ImageFileFormat format) async {} + + /// Returns an [ImageFormatGroup]'s Pigeon representation. + PlatformImageFormatGroup _pigeonImageFormat(ImageFormatGroup format) { + switch (format) { + // "unknown" is used to indicate the default. + case ImageFormatGroup.unknown: + case ImageFormatGroup.bgra8888: + return PlatformImageFormatGroup.bgra8888; + case ImageFormatGroup.yuv420: + return PlatformImageFormatGroup.yuv420; + case ImageFormatGroup.jpeg: + case ImageFormatGroup.nv21: + // Fall through. + } + // The enum comes from a different package, which could get a new value at + // any time, so provide a fallback that ensures this won't break when used + // with a version that contains new values. This is deliberately outside + // the switch rather than a `default` so that the linter will flag the + // switch as needing an update. 
+ // TODO(stuartmorgan): Consider throwing an UnsupportedError, instead of + // doing fallback, when a specific unsupported format is requested. This + // would require a breaking change at this layer and the app-facing layer. + return PlatformImageFormatGroup.bgra8888; + } +} + +/// Callback handler for camera-level events from the platform host. +@visibleForTesting +class HostCameraMessageHandler implements CameraEventApi { + /// Creates a new handler that listens for events from camera [cameraId], and + /// broadcasts them to [streamController]. + HostCameraMessageHandler(this.cameraId, this.streamController) { + CameraEventApi.setUp(this, messageChannelSuffix: cameraId.toString()); + } + + /// Removes the handler for native messages. + void dispose() { + CameraEventApi.setUp(null, messageChannelSuffix: cameraId.toString()); + } + + /// The camera ID this handler listens for events from. + final int cameraId; + + /// The controller used to broadcast camera events coming from the + /// host platform. + final StreamController streamController; + + @override + void error(String message) { + streamController.add(CameraErrorEvent(cameraId, message)); + } + + @override + void initialized(PlatformCameraState initialState) { + streamController.add( + CameraInitializedEvent( + cameraId, + initialState.previewSize.width, + initialState.previewSize.height, + exposureModeFromPlatform(initialState.exposureMode), + initialState.exposurePointSupported, + focusModeFromPlatform(initialState.focusMode), + initialState.focusPointSupported, + ), + ); + } +} + +/// Converts a Pigeon [PlatformExposureMode] to an [ExposureMode]. +ExposureMode exposureModeFromPlatform(PlatformExposureMode mode) { + return switch (mode) { + PlatformExposureMode.auto => ExposureMode.auto, + PlatformExposureMode.locked => ExposureMode.locked, + }; +} + +/// Converts a Pigeon [PlatformFocusMode] to an [FocusMode]. 
+FocusMode focusModeFromPlatform(PlatformFocusMode mode) { + return switch (mode) { + PlatformFocusMode.auto => FocusMode.auto, + PlatformFocusMode.locked => FocusMode.locked, + }; } diff --git a/packages/camera/camera_linux/lib/src/messages.g.dart b/packages/camera/camera_linux/lib/src/messages.g.dart index f9b7672e4c94..6cf92e4af05a 100644 --- a/packages/camera/camera_linux/lib/src/messages.g.dart +++ b/packages/camera/camera_linux/lib/src/messages.g.dart @@ -28,15 +28,6 @@ List wrapResponse({Object? result, PlatformException? error, bool empty return [error.code, error.message, error.details]; } -enum PlatformCameraLensDirection { - /// Front facing camera (a user looking at the screen is seen by the camera). - front, - /// Back facing camera (a user looking at the screen is not seen by the camera). - back, - /// External camera which may not be mounted to the device. - external, -} - enum PlatformDeviceOrientation { portraitUp, landscapeLeft, @@ -72,15 +63,6 @@ enum PlatformImageFormatGroup { yuv420, } -enum PlatformResolutionPreset { - low, - medium, - high, - veryHigh, - ultraHigh, - max, -} - class PlatformSize { PlatformSize({ required this.width, @@ -107,34 +89,6 @@ class PlatformSize { } } -class PlatformCameraDescription { - PlatformCameraDescription({ - required this.name, - required this.lensDirection, - }); - - /// The name of the camera device. - String name; - - /// The direction the camera is facing. - PlatformCameraLensDirection lensDirection; - - Object encode() { - return [ - name, - lensDirection, - ]; - } - - static PlatformCameraDescription decode(Object result) { - result as List; - return PlatformCameraDescription( - name: result[0]! as String, - lensDirection: result[1]! 
as PlatformCameraLensDirection, - ); - } -} - class PlatformCameraState { PlatformCameraState({ required this.previewSize, @@ -181,47 +135,6 @@ class PlatformCameraState { } } -class PlatformMediaSettings { - PlatformMediaSettings({ - required this.resolutionPreset, - this.framesPerSecond, - this.videoBitrate, - this.audioBitrate, - required this.enableAudio, - }); - - PlatformResolutionPreset resolutionPreset; - - int? framesPerSecond; - - int? videoBitrate; - - int? audioBitrate; - - bool enableAudio; - - Object encode() { - return [ - resolutionPreset, - framesPerSecond, - videoBitrate, - audioBitrate, - enableAudio, - ]; - } - - static PlatformMediaSettings decode(Object result) { - result as List; - return PlatformMediaSettings( - resolutionPreset: result[0]! as PlatformResolutionPreset, - framesPerSecond: result[1] as int?, - videoBitrate: result[2] as int?, - audioBitrate: result[3] as int?, - enableAudio: result[4]! as bool, - ); - } -} - class PlatformPoint { PlatformPoint({ required this.x, @@ -256,44 +169,32 @@ class _PigeonCodec extends StandardMessageCodec { if (value is int) { buffer.putUint8(4); buffer.putInt64(value); - } else if (value is PlatformCameraLensDirection) { - buffer.putUint8(129); - writeValue(buffer, value.index); } else if (value is PlatformDeviceOrientation) { - buffer.putUint8(130); + buffer.putUint8(129); writeValue(buffer, value.index); } else if (value is PlatformExposureMode) { - buffer.putUint8(131); + buffer.putUint8(130); writeValue(buffer, value.index); } else if (value is PlatformFlashMode) { - buffer.putUint8(132); + buffer.putUint8(131); writeValue(buffer, value.index); } else if (value is PlatformFocusMode) { - buffer.putUint8(133); + buffer.putUint8(132); writeValue(buffer, value.index); } else if (value is PlatformImageFileFormat) { - buffer.putUint8(134); + buffer.putUint8(133); writeValue(buffer, value.index); } else if (value is PlatformImageFormatGroup) { - buffer.putUint8(135); - writeValue(buffer, value.index); - 
} else if (value is PlatformResolutionPreset) { - buffer.putUint8(136); + buffer.putUint8(134); writeValue(buffer, value.index); } else if (value is PlatformSize) { - buffer.putUint8(137); - writeValue(buffer, value.encode()); - } else if (value is PlatformCameraDescription) { - buffer.putUint8(138); + buffer.putUint8(135); writeValue(buffer, value.encode()); } else if (value is PlatformCameraState) { - buffer.putUint8(139); - writeValue(buffer, value.encode()); - } else if (value is PlatformMediaSettings) { - buffer.putUint8(140); + buffer.putUint8(136); writeValue(buffer, value.encode()); } else if (value is PlatformPoint) { - buffer.putUint8(141); + buffer.putUint8(137); writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); @@ -304,38 +205,28 @@ class _PigeonCodec extends StandardMessageCodec { Object? readValueOfType(int type, ReadBuffer buffer) { switch (type) { case 129: - final int? value = readValue(buffer) as int?; - return value == null ? null : PlatformCameraLensDirection.values[value]; - case 130: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformDeviceOrientation.values[value]; - case 131: + case 130: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformExposureMode.values[value]; - case 132: + case 131: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformFlashMode.values[value]; - case 133: + case 132: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformFocusMode.values[value]; - case 134: + case 133: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformImageFileFormat.values[value]; - case 135: + case 134: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformImageFormatGroup.values[value]; - case 136: - final int? value = readValue(buffer) as int?; - return value == null ? 
null : PlatformResolutionPreset.values[value]; - case 137: + case 135: return PlatformSize.decode(readValue(buffer)!); - case 138: - return PlatformCameraDescription.decode(readValue(buffer)!); - case 139: + case 136: return PlatformCameraState.decode(readValue(buffer)!); - case 140: - return PlatformMediaSettings.decode(readValue(buffer)!); - case 141: + case 137: return PlatformPoint.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); @@ -357,8 +248,8 @@ class CameraApi { final String pigeonVar_messageChannelSuffix; /// Returns the list of available cameras. - Future> getAvailableCameras() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCameras$pigeonVar_messageChannelSuffix'; + Future> getAvailableCamerasNames() async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCamerasNames$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, pigeonChannelCodec, @@ -380,12 +271,12 @@ class CameraApi { message: 'Host platform returned null value for non-null return value.', ); } else { - return (pigeonVar_replyList[0] as List?)!.cast(); + return (pigeonVar_replyList[0] as List?)!.cast(); } } /// Create a new camera with the given settings, and returns its ID. - Future create(String cameraName, PlatformMediaSettings settings) async { + Future create(String cameraName) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.create$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -393,7 +284,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? 
pigeonVar_replyList = - await pigeonVar_channel.send([cameraName, settings]) as List?; + await pigeonVar_channel.send([cameraName]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { @@ -481,6 +372,29 @@ class CameraApi { } } + /// Get the texture ID for the camera with the given ID. + Future getTextureId(int cameraId) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getTextureId$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return (pigeonVar_replyList[0] as int?); + } + } + /// Called by the Dart side of the plugin when it has received the last image /// frame sent. /// @@ -1126,43 +1040,6 @@ class CameraApi { } } -/// Handler for native callbacks that are not tied to a specific camera ID. -abstract class CameraGlobalEventApi { - static const MessageCodec pigeonChannelCodec = _PigeonCodec(); - - /// Called when the device's physical orientation changes. - void deviceOrientationChanged(PlatformDeviceOrientation orientation); - - static void setUp(CameraGlobalEventApi? api, {BinaryMessenger? binaryMessenger, String messageChannelSuffix = '',}) { - messageChannelSuffix = messageChannelSuffix.isNotEmpty ? 
'.$messageChannelSuffix' : ''; - { - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - 'dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged$messageChannelSuffix', pigeonChannelCodec, - binaryMessenger: binaryMessenger); - if (api == null) { - pigeonVar_channel.setMessageHandler(null); - } else { - pigeonVar_channel.setMessageHandler((Object? message) async { - assert(message != null, - 'Argument for dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged was null.'); - final List args = (message as List?)!; - final PlatformDeviceOrientation? arg_orientation = (args[0] as PlatformDeviceOrientation?); - assert(arg_orientation != null, - 'Argument for dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged was null, expected non-null PlatformDeviceOrientation.'); - try { - api.deviceOrientationChanged(arg_orientation!); - return wrapResponse(empty: true); - } on PlatformException catch (e) { - return wrapResponse(error: e); - } catch (e) { - return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); - } - }); - } - } - } -} - /// Handler for native callbacks that are tied to a specific camera ID. /// /// This is intended to be initialized with the camera ID as a suffix. diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index 30858ec47992..265647b93354 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -3,6 +3,26 @@ # the plugin to fail to compile for some customers of the plugin. 
cmake_minimum_required(VERSION 3.10) +set(CMAKE_CXX_STANDARD 17) + +# Path to SDK tar.gz +set(PYLON_ARCHIVE ${CMAKE_CURRENT_SOURCE_DIR}/../deps/pylon-8.0.2.16314_linux-aarch64.tar.gz) +set(PYLON_ROOT ${CMAKE_BINARY_DIR}/pylon-sdk) + +# Extract the archive only if not already extracted +if(NOT EXISTS "${PYLON_ROOT}") + message(STATUS "Extracting Pylon SDK to ${PYLON_ROOT}...") + message(STATUS "Command: ${CMAKE_COMMAND} -E tar -xzf ${PYLON_ARCHIVE}") + file(MAKE_DIRECTORY "${PYLON_ROOT}") + execute_process( + COMMAND ${CMAKE_COMMAND} -E tar -xzf "${PYLON_ARCHIVE}" + WORKING_DIRECTORY "${PYLON_ROOT}" + ) +endif() + +include_directories(${PYLON_ROOT}/include) +link_directories(${PYLON_ROOT}/lib) + # Project-level configuration. set(PROJECT_NAME "camera_linux") project(${PROJECT_NAME} LANGUAGES CXX) @@ -14,6 +34,9 @@ set(PLUGIN_NAME "camera_linux_plugin") # Any new source files that you add to the plugin should be added here. list(APPEND PLUGIN_SOURCES "camera_plugin.cpp" + "camera_host_plugin.cpp" + "camera_texture_image_event_handler.cpp" + "messages.g.cc" ) @@ -43,11 +66,23 @@ target_include_directories(${PLUGIN_NAME} INTERFACE ) target_link_libraries(${PLUGIN_NAME} PRIVATE flutter) target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::GTK) +cmake_policy(SET CMP0072 NEW) +set(OpenGL_GL_PREFERENCE GLVND) +find_package( OpenGL REQUIRED ) +target_link_libraries(${PLUGIN_NAME} PRIVATE OpenGL::GL) +target_link_libraries(${PLUGIN_NAME} + PRIVATE + ${PYLON_ROOT}/lib/libpylonbase.so + ${PYLON_ROOT}/lib/libpylonutility.so +) +find_package(PkgConfig REQUIRED) + # List of absolute paths to libraries that should be bundled with the plugin. # This list could contain prebuilt libraries, or libraries created by an # external build triggered from this build file. 
set(camera_linux_bundled_libraries - "" + "${PYLON_ROOT}/lib/libpylonbase.so" + "${PYLON_ROOT}/lib/libpylonutility.so" PARENT_SCOPE ) diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.cpp b/packages/camera/camera_linux/linux/camera_host_plugin.cpp new file mode 100644 index 000000000000..6779a3c9d75b --- /dev/null +++ b/packages/camera/camera_linux/linux/camera_host_plugin.cpp @@ -0,0 +1,207 @@ +#include "camera_host_plugin.h" + +std::map + CameraHostPlugin::cameraTextureImageEventHandlers = {}; +std::map> + CameraHostPlugin::cameras = {}; +std::map + CameraHostPlugin::cameraLinuxCameraEventApis = {}; +FlPluginRegistrar* CameraHostPlugin::registrar = nullptr; + +CameraHostPlugin::CameraHostPlugin(FlPluginRegistrar* registrar) + : m_registrar(FL_PLUGIN_REGISTRAR(g_object_ref(registrar))) { + CameraHostPlugin::registrar = m_registrar; + static CameraLinuxCameraApiVTable api_vtable = { + .get_available_cameras_names = get_available_cameras_names, + .create = create, + .initialize = initialize, + .start_image_stream = start_image_stream, + .stop_image_stream = stop_image_stream, + .get_texture_id = get_texture_id, + .received_image_stream_data = received_image_stream_data, + .dispose = dispose, + .lock_capture_orientation = lock_capture_orientation, + .unlock_capture_orientation = unlock_capture_orientation, + .take_picture = take_picture, + .prepare_for_video_recording = prepare_for_video_recording, + .start_video_recording = start_video_recording, + .stop_video_recording = stop_video_recording, + .pause_video_recording = pause_video_recording, + .resume_video_recording = resume_video_recording, + .set_flash_mode = set_flash_mode, + .set_exposure_mode = set_exposure_mode, + .set_exposure_point = set_exposure_point, + .set_lens_position = set_lens_position, + .get_min_exposure_offset = get_min_exposure_offset, + .get_max_exposure_offset = get_max_exposure_offset, + .set_exposure_offset = set_exposure_offset, + .set_focus_mode = set_focus_mode, + 
.set_focus_point = set_focus_point, + .get_min_zoom_level = get_min_zoom_level, + .get_max_zoom_level = get_max_zoom_level, + .set_zoom_level = set_zoom_level, + .pause_preview = pause_preview, + .resume_preview = resume_preview, + .update_description_while_recording = update_description_while_recording, + .set_image_file_format = set_image_file_format, + }; + + camera_linux_camera_api_set_method_handlers( + fl_plugin_registrar_get_messenger(registrar), nullptr, &api_vtable, this, + nullptr); + Pylon::PylonInitialize(); +} + +CameraHostPlugin::~CameraHostPlugin() { + for (auto&& it = cameraLinuxCameraEventApis.begin(); + it != cameraLinuxCameraEventApis.end(); ++it) { + g_object_unref(it->second); + } + cameraLinuxCameraEventApis.clear(); + for (auto&& it = cameras.begin(); it != cameras.end(); ++it) { + it->second->Close(); + } + cameras.clear(); + cameraTextureImageEventHandlers.clear(); + g_object_unref(m_registrar); + Pylon::PylonTerminate(); +} + +void CameraHostPlugin::get_available_cameras_names( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(get_available_cameras_names, { + Pylon::CTlFactory& TlFactory = Pylon::CTlFactory::GetInstance(); + Pylon::DeviceInfoList_t lstDevices; + TlFactory.EnumerateDevices(lstDevices); + FlValue* list = fl_value_new_list(); + + if (!lstDevices.empty()) { + for (auto&& it = lstDevices.begin(); it != lstDevices.end(); ++it) { + fl_value_append_take(list, fl_value_new_string(it->GetFriendlyName())); + } + } + + CAMERA_HOST_RETURN(list); + }); +} + +void CameraHostPlugin::create( + const gchar* camera_name, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(create, { + Pylon::CTlFactory& TlFactory = Pylon::CTlFactory::GetInstance(); + Pylon::DeviceInfoList_t lstDevices; + TlFactory.EnumerateDevices(lstDevices); + + for (auto&& it = lstDevices.begin(); it != lstDevices.end(); ++it) { + if (it->GetFriendlyName() == 
camera_name) { + std::string serialNumber = it->GetSerialNumber().c_str(); + int64_t camera_id = std::stoll(serialNumber); + if (cameras.find(camera_id) != cameras.end()) { + cameras[camera_id]->Close(); + cameras.erase(camera_id); + } + + cameras[camera_id] = std::make_unique( + TlFactory.CreateDevice(*it)); + + if (cameraLinuxCameraEventApis.find(camera_id) == + cameraLinuxCameraEventApis.end()) { + cameraLinuxCameraEventApis[camera_id] = + camera_linux_camera_event_api_new( + fl_plugin_registrar_get_messenger(registrar), + std::to_string(camera_id).c_str()); + } + + CAMERA_HOST_RETURN(camera_id); + return; + } + } + + CAMERA_HOST_RAISE_ERROR("Camera not found"); + }); +} + +void CameraHostPlugin::camera_linux_camera_event_api_initialized_callback( + GObject* object, GAsyncResult* result, gpointer user_data) {} + +void CameraHostPlugin::initialize( + int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(initialize, { + const auto camera_it = cameras.find(camera_id); + if (camera_it == cameras.end()) { + CAMERA_HOST_RAISE_ERROR("Camera not created"); + } + + CameraTextureImageEventHandler* cameraTextureImageEventHandler = + new CameraTextureImageEventHandler(registrar); + cameraTextureImageEventHandlers[camera_id] = cameraTextureImageEventHandler; + + Pylon::CInstantCamera* camera = camera_it->second.get(); + camera->Open(); + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + Pylon::CEnumParameter(nodemap, "DeviceLinkThroughputLimitMode") + .TrySetValue("Off"); + Pylon::CBooleanParameter(nodemap, "AcquisitionFrameRateEnable") + .TrySetValue(true); + Pylon::CFloatParameter(nodemap, "AcquisitionFrameRate").TrySetValue(60.0); + Pylon::CFloatParameter(nodemap, "ResultingFrameRate").TrySetValue(60.0); + Pylon::CEnumParameter(nodemap, "PixelFormat").TrySetValue("RGB8"); + Pylon::CIntegerParameter(nodemap, "DecimationHorizontal").TrySetValue(2); + 
Pylon::CIntegerParameter(nodemap, "DecimationVertical").TrySetValue(2); + Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off"); + Pylon::CIntegerParameter(nodemap, "Width").TrySetValue(1920); + Pylon::CIntegerParameter(nodemap, "Height").TrySetValue(1080); + Pylon::CIntegerParameter(nodemap, "OffsetX").TrySetValue(0); + Pylon::CIntegerParameter(nodemap, "OffsetY").TrySetValue(0); + + camera->RegisterImageEventHandler(cameraTextureImageEventHandler, + Pylon::RegistrationMode_Append, + Pylon::Cleanup_Delete); + camera->StartGrabbing(Pylon::GrabStrategy_LatestImages, + Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); + + std::cout << "Texture ID: " + << cameraTextureImageEventHandler->get_texture_id() << std::endl; + + CameraLinuxPlatformSize* size = camera_linux_platform_size_new(1920, 1080); + CameraLinuxPlatformCameraState* cameraState = + camera_linux_platform_camera_state_new( + size, + CameraLinuxPlatformExposureMode:: + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED, + CameraLinuxPlatformFocusMode:: + CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED, + true, true); + camera_linux_camera_event_api_initialized( + cameraLinuxCameraEventApis[camera_id], cameraState, nullptr, + camera_linux_camera_event_api_initialized_callback, nullptr); + g_object_unref(cameraState); + g_object_unref(size); + CAMERA_HOST_VOID_RETURN(); + }); +} + +void CameraHostPlugin::get_texture_id( + int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(get_texture_id, { + const auto cameraTextureImageEventHandler_it = + cameraTextureImageEventHandlers.find(camera_id); + if (cameraTextureImageEventHandler_it == + cameraTextureImageEventHandlers.end()) { + CAMERA_HOST_RAISE_ERROR("Camera not created"); + } + CameraTextureImageEventHandler* cameraTextureImageEventHandler = + cameraTextureImageEventHandler_it->second; + if (cameraTextureImageEventHandler == nullptr) { + CAMERA_HOST_RAISE_ERROR("Camera not initialized"); + } + 
int64_t texture_id = cameraTextureImageEventHandler->get_texture_id(); + if (texture_id == -1) { + CAMERA_HOST_RAISE_ERROR("Texture not created"); + } + CAMERA_HOST_RETURN(&texture_id); + }); +} diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.h b/packages/camera/camera_linux/linux/camera_host_plugin.h new file mode 100644 index 000000000000..6dfd88277798 --- /dev/null +++ b/packages/camera/camera_linux/linux/camera_host_plugin.h @@ -0,0 +1,234 @@ + +#ifndef CAMERA_HOST_PLUGIN_PRIVATE_H_ +#define CAMERA_HOST_PLUGIN_PRIVATE_H_ + +#include + +#include "camera_texture_image_event_handler.h" +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Woverloaded-virtual" +#pragma clang diagnostic ignored "-Wunused-variable" + +#include + +#pragma clang diagnostic pop + +#define CAMERA_HOST_ERROR_HANDLING(method_name, code) \ + try { \ + [[maybe_unused]] auto camera_linux_camera_api_respond_macro = \ + &camera_linux_camera_api_respond_##method_name; \ + [[maybe_unused]] auto camera_linux_camera_api_respond_error_macro = \ + &camera_linux_camera_api_respond_error_##method_name; \ + code \ + } catch (const Pylon::GenericException& e) { \ + camera_linux_camera_api_respond_error_##method_name( \ + response_handle, nullptr, e.what(), nullptr); \ + } catch (const std::exception& e) { \ + camera_linux_camera_api_respond_error_##method_name( \ + response_handle, nullptr, e.what(), nullptr); \ + } catch (...) { \ + camera_linux_camera_api_respond_error_##method_name( \ + response_handle, nullptr, "CameraLinuxPlugin Unknown error", nullptr); \ + } + +#define CAMERA_HOST_RETURN(...) 
\ + camera_linux_camera_api_respond_macro(response_handle, __VA_ARGS__) + +#define CAMERA_HOST_VOID_RETURN() \ + camera_linux_camera_api_respond_macro(response_handle) + +#define CAMERA_HOST_RAISE_ERROR(description) \ + camera_linux_camera_api_respond_error_macro(response_handle, nullptr, \ + #description, nullptr) + +class CameraHostPlugin { + static FlPluginRegistrar* registrar; + FlPluginRegistrar* m_registrar; + static std::map> cameras; + static std::map + cameraTextureImageEventHandlers; + static std::map + cameraLinuxCameraEventApis; + + public: + CameraHostPlugin(FlPluginRegistrar* registrar); + + ~CameraHostPlugin(); + + static void get_available_cameras_names( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + + static void create(const gchar* camera_name, + CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data); + + static void initialize(int64_t camera_id, + CameraLinuxPlatformImageFormatGroup image_format, + CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data); + + static void start_image_stream( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void stop_image_stream( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void received_image_stream_data( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void dispose(int64_t camera_id, + CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void lock_capture_orientation( + CameraLinuxPlatformDeviceOrientation orientation, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void 
unlock_capture_orientation( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void get_texture_id( + int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data); + + static void take_picture(CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void prepare_for_video_recording( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void start_video_recording( + gboolean enable_stream, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void stop_video_recording( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void pause_video_recording( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void resume_video_recording( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_flash_mode( + CameraLinuxPlatformFlashMode mode, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_exposure_mode( + CameraLinuxPlatformExposureMode mode, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_exposure_point( + CameraLinuxPlatformPoint* point, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_lens_position( + double 
position, CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void get_min_exposure_offset( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void get_max_exposure_offset( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_exposure_offset( + double offset, CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_focus_mode( + CameraLinuxPlatformFocusMode mode, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_focus_point( + CameraLinuxPlatformPoint* point, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void get_min_zoom_level( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void get_max_zoom_level( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_zoom_level( + double zoom, CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void pause_preview(CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void resume_preview( + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void update_description_while_recording( + const gchar* camera_name, + 
CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void set_image_file_format( + CameraLinuxPlatformImageFileFormat format, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + throw new std::runtime_error("Not Implemented"); + } + + static void camera_linux_camera_event_api_initialized_callback( + GObject* object, GAsyncResult* result, gpointer user_data); +}; + +#endif // CAMERA_HOST_PLUGIN_PRIVATE_H_ diff --git a/packages/camera/camera_linux/linux/camera_plugin.cpp b/packages/camera/camera_linux/linux/camera_plugin.cpp index 3973c0d044cd..b486905c789e 100644 --- a/packages/camera/camera_linux/linux/camera_plugin.cpp +++ b/packages/camera/camera_linux/linux/camera_plugin.cpp @@ -1,3 +1,8 @@ #include "include/camera_linux/camera_plugin.h" -void camera_plugin_register_with_registrar(FlPluginRegistrar* registrar) {} +#include "camera_host_plugin.h" + +void camera_plugin_register_with_registrar(FlPluginRegistrar* registrar) { + CameraHostPlugin* camera_host_plugin = new CameraHostPlugin(registrar); + g_object_unref(camera_host_plugin); +} diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp new file mode 100644 index 000000000000..6a68a0b937d4 --- /dev/null +++ b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp @@ -0,0 +1,119 @@ +#include "camera_texture_image_event_handler.h" + +#include + +G_DEFINE_TYPE(FlMyTextureGL, fl_my_texture_gl, fl_texture_gl_get_type()) + +static gboolean fl_my_texture_gl_populate(FlTextureGL* texture, + uint32_t* target, uint32_t* name, + uint32_t* width, uint32_t* height, + GError** error) { + FlMyTextureGL* f = (FlMyTextureGL*)texture; + *target = f->target; + *name = f->name; + *width = f->width; + *height = f->height; + return true; +} + +FlMyTextureGL* fl_my_texture_gl_new(uint32_t 
target, uint32_t name, + uint32_t width, uint32_t height) { + auto r = FL_MY_TEXTURE_GL(g_object_new(fl_my_texture_gl_get_type(), nullptr)); + r->target = target; + r->name = name; + r->width = width; + r->height = height; + return r; +} + +static void fl_my_texture_gl_class_init(FlMyTextureGLClass* klass) { + FL_TEXTURE_GL_CLASS(klass)->populate = fl_my_texture_gl_populate; +} + +static void fl_my_texture_gl_init(FlMyTextureGL* self) {} + +CameraTextureImageEventHandler::CameraTextureImageEventHandler( + FlPluginRegistrar* registrar) + : m_registrar(registrar), + m_texture_registrar( + fl_plugin_registrar_get_texture_registrar(registrar)) {} + +CameraTextureImageEventHandler ::~CameraTextureImageEventHandler() { + if (m_texture) { + glDeleteTextures(1, &m_texture_name); + fl_texture_registrar_unregister_texture(m_texture_registrar, + FL_TEXTURE(m_texture)); + g_object_unref(m_texture); + } +} + +int64_t CameraTextureImageEventHandler::get_texture_id() { + if (!m_texture) { + std::cerr << "Texture is null" << std::endl; + return -1; + } + return fl_texture_get_id(FL_TEXTURE(m_texture)); +} + +void CameraTextureImageEventHandler::OnImageEventHandlerRegistered( + Pylon::CInstantCamera& camera) { + FlView* fl_view = FL_VIEW(fl_plugin_registrar_get_view(m_registrar)); + GdkWindow* window = gtk_widget_get_parent_window(GTK_WIDGET(fl_view)); + m_gl_context = gdk_window_create_gl_context(window, NULL); + + // Camera frame size, update if you get dynamic size + int width = 1920; + int height = 1080; + + // Create GL texture for the camera preview + gdk_gl_context_make_current(m_gl_context); + glGenTextures(1, &m_texture_name); + glBindTexture(GL_TEXTURE_2D, m_texture_name); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, + GL_UNSIGNED_BYTE, nullptr); + + // Wrap GL texture for Flutter + m_texture = + 
fl_my_texture_gl_new(GL_TEXTURE_2D, m_texture_name, width, height);
+  fl_texture_registrar_register_texture(m_texture_registrar,
+                                        FL_TEXTURE(m_texture));
+  fl_texture_registrar_mark_texture_frame_available(m_texture_registrar,
+                                                    FL_TEXTURE(m_texture));
+}
+
+void CameraTextureImageEventHandler::OnImageGrabbed(
+    Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& ptr) {
+  if (!m_texture) {
+    return;
+  }
+
+  // Check the grab result *before* uploading its buffer; checking it last
+  // would push a failed (garbage) frame to the texture first.
+  if (!ptr->GrabSucceeded()) {
+    std::cerr << "Error grabbing image" << std::endl;
+    return;
+  }
+
+  static std::chrono::steady_clock::time_point m_last_fps_time;
+  static int m_frame_count = 0;
+
+  gdk_gl_context_make_current(m_gl_context);
+  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, ptr->GetWidth(), ptr->GetHeight(),
+                  GL_RGB, GL_UNSIGNED_BYTE, ptr->GetBuffer());
+  glFlush();
+  fl_texture_registrar_mark_texture_frame_available(m_texture_registrar,
+                                                    FL_TEXTURE(m_texture));
+
+  // Track frame count
+  m_frame_count++;
+  auto now = std::chrono::steady_clock::now();
+  auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(
+                     now - m_last_fps_time)
+                     .count();
+
+  if (elapsed >= 1000) {  // 1 second has passed
+    std::cout << "FPS: " << m_frame_count << std::endl;
+    m_frame_count = 0;
+    m_last_fps_time = now;
+  }
+}
diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h
new file mode 100644
index 000000000000..20800f519ee2
--- /dev/null
+++ b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h
@@ -0,0 +1,52 @@
+
+#ifndef CAMERA_TEXTURE_IMAGE_EVENT_HANDLER_H_
+#define CAMERA_TEXTURE_IMAGE_EVENT_HANDLER_H_
+
+#include <cstdint>
+
+#include "flutter_linux/flutter_linux.h"
+#include "messages.g.h"
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Woverloaded-virtual"
+#pragma clang diagnostic ignored "-Wunused-variable"
+
+#include <pylon/PylonIncludes.h>
+
+#pragma clang diagnostic pop
+
+G_DECLARE_FINAL_TYPE(FlMyTextureGL, fl_my_texture_gl, FL, MY_TEXTURE_GL,
FlTextureGL)
+
+struct _FlMyTextureGL {
+  FlTextureGL parent_instance;
+  uint32_t target;
+  uint32_t name;
+  uint32_t width;
+  uint32_t height;
+};
+
+FlMyTextureGL* fl_my_texture_gl_new(uint32_t target, uint32_t name,
+                                    uint32_t width, uint32_t height);
+
+class CameraTextureImageEventHandler : public Pylon::CImageEventHandler {
+  FlMyTextureGL* m_texture;
+  unsigned int m_texture_name;
+  FlPluginRegistrar* m_registrar;
+  FlTextureRegistrar* m_texture_registrar;
+  GdkGLContext* m_gl_context;
+
+ public:
+  CameraTextureImageEventHandler(FlPluginRegistrar* registrar);
+
+  ~CameraTextureImageEventHandler() override;
+
+  int64_t get_texture_id();
+
+  void OnImageEventHandlerRegistered(Pylon::CInstantCamera& camera) override;
+
+  void OnImageGrabbed(Pylon::CInstantCamera& camera,
+                      const Pylon::CGrabResultPtr& ptr) override;
+};
+
+#endif  // CAMERA_TEXTURE_IMAGE_EVENT_HANDLER_H_
diff --git a/packages/camera/camera_linux/linux/dma_buffer_factory.cpp b/packages/camera/camera_linux/linux/dma_buffer_factory.cpp
new file mode 100644
index 000000000000..e57be31014c2
--- /dev/null
+++ b/packages/camera/camera_linux/linux/dma_buffer_factory.cpp
@@ -0,0 +1,69 @@
+#include "dma_buffer_factory.h"
+
+DMABufferFactory::DMABufferFactory() {
+  // Validate the fd before handing it to gbm_create_device(), and close it
+  // on failure so it cannot leak.
+  int fd = open("/dev/dri/renderD128", O_RDWR);
+  if (fd < 0) {
+    throw std::runtime_error("Failed to open DRM render node");
+  }
+  m_gbmDevice = gbm_create_device(fd);
+  if (!m_gbmDevice) {
+    close(fd);
+    throw std::runtime_error("Failed to create GBM device");
+  }
+}
+
+DMABufferFactory::~DMABufferFactory() {
+  // FreeBuffer() erases entries from `buffers`, so a range-for over the map
+  // would invalidate its iterator; drain the map instead.
+  while (!buffers.empty()) {
+    BufferInfo info = buffers.begin()->second;
+    FreeBuffer(info.mappedAddress, info.context);
+  }
+  if (m_gbmDevice) {
+    // Destroy the device first, then close the fd it wraps.
+    int fd = gbm_device_get_fd(m_gbmDevice);
+    gbm_device_destroy(m_gbmDevice);
+    close(fd);
+  }
+}
+
+void DMABufferFactory::AllocateBuffer(size_t bufferSize, void** pCreatedBuffer,
+                                      intptr_t& bufferContext) {
+  const int width = 3840;
+  const int height = 2160;
+  const int format = GBM_FORMAT_XRGB8888;
+
+  gbm_bo* bo = gbm_bo_create(m_gbmDevice, width, height, format,
+                             GBM_BO_USE_LINEAR | GBM_BO_USE_RENDERING);
+
+  if (!bo) {
+    throw
std::runtime_error("Failed to allocate GBM buffer"); + } + + void* map_data = nullptr; + uint32_t stride; + void* addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_WRITE, + &stride, &map_data); + if (!addr) { + gbm_bo_destroy(bo); + throw std::runtime_error("Failed to map GBM buffer"); + } + + intptr_t ctx = reinterpret_cast(map_data); + + BufferInfo info = {bo, addr, ctx}; + buffers[addr] = info; + + *pCreatedBuffer = addr; + bufferContext = ctx; +} + +void DMABufferFactory::FreeBuffer(void* pCreatedBuffer, + intptr_t bufferContext) { + auto it = buffers.find(pCreatedBuffer); + if (it != buffers.end()) { + gbm_bo_unmap(it->second.bo, (void*)bufferContext); + gbm_bo_destroy(it->second.bo); + buffers.erase(it); + } +} + +void DMABufferFactory::DestroyBufferFactory() { delete this; } + +gbm_bo* DMABufferFactory::get_bo(void* pCreatedBuffer) { + auto it = buffers.find(pCreatedBuffer); + if (it != buffers.end()) { + return it->second.bo; + } + return nullptr; +} diff --git a/packages/camera/camera_linux/linux/dma_buffer_factory.h b/packages/camera/camera_linux/linux/dma_buffer_factory.h new file mode 100644 index 000000000000..7e5e5dbca52e --- /dev/null +++ b/packages/camera/camera_linux/linux/dma_buffer_factory.h @@ -0,0 +1,55 @@ + +#ifndef DMA_BUFFER_FACTORY_H_ +#define DMA_BUFFER_FACTORY_H_ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Woverloaded-virtual" +#pragma clang diagnostic ignored "-Wunused-variable" + +#include + +#pragma clang diagnostic pop + +class DMABufferFactory : public Pylon::IBufferFactory { + public: + struct BufferInfo { + gbm_bo* bo; + void* mappedAddress; + intptr_t context; + }; + + DMABufferFactory(); + + ~DMABufferFactory() override; + + void AllocateBuffer(size_t bufferSize, void** pCreatedBuffer, + intptr_t& bufferContext) override; + + 
void FreeBuffer(void* pCreatedBuffer, intptr_t bufferContext) override; + + void DestroyBufferFactory() override; + + gbm_bo* get_bo(void* pCreatedBuffer); + gbm_device* get_gbm_device() { return m_gbmDevice; } + + private: + gbm_device* m_gbmDevice = nullptr; + std::map buffers = {}; +}; + +#endif // DMA_BUFFER_FACTORY_H_ diff --git a/packages/camera/camera_linux/linux/messages.g.cc b/packages/camera/camera_linux/linux/messages.g.cc index ae838c8e117f..f6250ab91f6a 100644 --- a/packages/camera/camera_linux/linux/messages.g.cc +++ b/packages/camera/camera_linux/linux/messages.g.cc @@ -58,60 +58,6 @@ static CameraLinuxPlatformSize* camera_linux_platform_size_new_from_list(FlValue return camera_linux_platform_size_new(width, height); } -struct _CameraLinuxPlatformCameraDescription { - GObject parent_instance; - - gchar* name; - CameraLinuxPlatformCameraLensDirection lens_direction; -}; - -G_DEFINE_TYPE(CameraLinuxPlatformCameraDescription, camera_linux_platform_camera_description, G_TYPE_OBJECT) - -static void camera_linux_platform_camera_description_dispose(GObject* object) { - CameraLinuxPlatformCameraDescription* self = CAMERA_LINUX_PLATFORM_CAMERA_DESCRIPTION(object); - g_clear_pointer(&self->name, g_free); - G_OBJECT_CLASS(camera_linux_platform_camera_description_parent_class)->dispose(object); -} - -static void camera_linux_platform_camera_description_init(CameraLinuxPlatformCameraDescription* self) { -} - -static void camera_linux_platform_camera_description_class_init(CameraLinuxPlatformCameraDescriptionClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_camera_description_dispose; -} - -CameraLinuxPlatformCameraDescription* camera_linux_platform_camera_description_new(const gchar* name, CameraLinuxPlatformCameraLensDirection lens_direction) { - CameraLinuxPlatformCameraDescription* self = CAMERA_LINUX_PLATFORM_CAMERA_DESCRIPTION(g_object_new(camera_linux_platform_camera_description_get_type(), nullptr)); - self->name = g_strdup(name); - 
self->lens_direction = lens_direction; - return self; -} - -const gchar* camera_linux_platform_camera_description_get_name(CameraLinuxPlatformCameraDescription* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_DESCRIPTION(self), nullptr); - return self->name; -} - -CameraLinuxPlatformCameraLensDirection camera_linux_platform_camera_description_get_lens_direction(CameraLinuxPlatformCameraDescription* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_CAMERA_DESCRIPTION(self), static_cast(0)); - return self->lens_direction; -} - -static FlValue* camera_linux_platform_camera_description_to_list(CameraLinuxPlatformCameraDescription* self) { - FlValue* values = fl_value_new_list(); - fl_value_append_take(values, fl_value_new_string(self->name)); - fl_value_append_take(values, fl_value_new_custom(129, fl_value_new_int(self->lens_direction), (GDestroyNotify)fl_value_unref)); - return values; -} - -static CameraLinuxPlatformCameraDescription* camera_linux_platform_camera_description_new_from_list(FlValue* values) { - FlValue* value0 = fl_value_get_list_value(values, 0); - const gchar* name = fl_value_get_string(value0); - FlValue* value1 = fl_value_get_list_value(values, 1); - CameraLinuxPlatformCameraLensDirection lens_direction = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); - return camera_linux_platform_camera_description_new(name, lens_direction); -} - struct _CameraLinuxPlatformCameraState { GObject parent_instance; @@ -174,9 +120,9 @@ gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinu static FlValue* camera_linux_platform_camera_state_to_list(CameraLinuxPlatformCameraState* self) { FlValue* values = fl_value_new_list(); - fl_value_append_take(values, fl_value_new_custom_object(137, G_OBJECT(self->preview_size))); - fl_value_append_take(values, fl_value_new_custom(131, fl_value_new_int(self->exposure_mode), (GDestroyNotify)fl_value_unref)); - 
fl_value_append_take(values, fl_value_new_custom(133, fl_value_new_int(self->focus_mode), (GDestroyNotify)fl_value_unref)); + fl_value_append_take(values, fl_value_new_custom_object(135, G_OBJECT(self->preview_size))); + fl_value_append_take(values, fl_value_new_custom(130, fl_value_new_int(self->exposure_mode), (GDestroyNotify)fl_value_unref)); + fl_value_append_take(values, fl_value_new_custom(132, fl_value_new_int(self->focus_mode), (GDestroyNotify)fl_value_unref)); fl_value_append_take(values, fl_value_new_bool(self->exposure_point_supported)); fl_value_append_take(values, fl_value_new_bool(self->focus_point_supported)); return values; @@ -196,125 +142,6 @@ static CameraLinuxPlatformCameraState* camera_linux_platform_camera_state_new_fr return camera_linux_platform_camera_state_new(preview_size, exposure_mode, focus_mode, exposure_point_supported, focus_point_supported); } -struct _CameraLinuxPlatformMediaSettings { - GObject parent_instance; - - CameraLinuxPlatformResolutionPreset resolution_preset; - int64_t* frames_per_second; - int64_t* video_bitrate; - int64_t* audio_bitrate; - gboolean enable_audio; -}; - -G_DEFINE_TYPE(CameraLinuxPlatformMediaSettings, camera_linux_platform_media_settings, G_TYPE_OBJECT) - -static void camera_linux_platform_media_settings_dispose(GObject* object) { - CameraLinuxPlatformMediaSettings* self = CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(object); - g_clear_pointer(&self->frames_per_second, g_free); - g_clear_pointer(&self->video_bitrate, g_free); - g_clear_pointer(&self->audio_bitrate, g_free); - G_OBJECT_CLASS(camera_linux_platform_media_settings_parent_class)->dispose(object); -} - -static void camera_linux_platform_media_settings_init(CameraLinuxPlatformMediaSettings* self) { -} - -static void camera_linux_platform_media_settings_class_init(CameraLinuxPlatformMediaSettingsClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_platform_media_settings_dispose; -} - -CameraLinuxPlatformMediaSettings* 
camera_linux_platform_media_settings_new(CameraLinuxPlatformResolutionPreset resolution_preset, int64_t* frames_per_second, int64_t* video_bitrate, int64_t* audio_bitrate, gboolean enable_audio) { - CameraLinuxPlatformMediaSettings* self = CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(g_object_new(camera_linux_platform_media_settings_get_type(), nullptr)); - self->resolution_preset = resolution_preset; - if (frames_per_second != nullptr) { - self->frames_per_second = static_cast(malloc(sizeof(int64_t))); - *self->frames_per_second = *frames_per_second; - } - else { - self->frames_per_second = nullptr; - } - if (video_bitrate != nullptr) { - self->video_bitrate = static_cast(malloc(sizeof(int64_t))); - *self->video_bitrate = *video_bitrate; - } - else { - self->video_bitrate = nullptr; - } - if (audio_bitrate != nullptr) { - self->audio_bitrate = static_cast(malloc(sizeof(int64_t))); - *self->audio_bitrate = *audio_bitrate; - } - else { - self->audio_bitrate = nullptr; - } - self->enable_audio = enable_audio; - return self; -} - -CameraLinuxPlatformResolutionPreset camera_linux_platform_media_settings_get_resolution_preset(CameraLinuxPlatformMediaSettings* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), static_cast(0)); - return self->resolution_preset; -} - -int64_t* camera_linux_platform_media_settings_get_frames_per_second(CameraLinuxPlatformMediaSettings* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), nullptr); - return self->frames_per_second; -} - -int64_t* camera_linux_platform_media_settings_get_video_bitrate(CameraLinuxPlatformMediaSettings* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), nullptr); - return self->video_bitrate; -} - -int64_t* camera_linux_platform_media_settings_get_audio_bitrate(CameraLinuxPlatformMediaSettings* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), nullptr); - return self->audio_bitrate; -} - -gboolean 
camera_linux_platform_media_settings_get_enable_audio(CameraLinuxPlatformMediaSettings* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_PLATFORM_MEDIA_SETTINGS(self), FALSE); - return self->enable_audio; -} - -static FlValue* camera_linux_platform_media_settings_to_list(CameraLinuxPlatformMediaSettings* self) { - FlValue* values = fl_value_new_list(); - fl_value_append_take(values, fl_value_new_custom(136, fl_value_new_int(self->resolution_preset), (GDestroyNotify)fl_value_unref)); - fl_value_append_take(values, self->frames_per_second != nullptr ? fl_value_new_int(*self->frames_per_second) : fl_value_new_null()); - fl_value_append_take(values, self->video_bitrate != nullptr ? fl_value_new_int(*self->video_bitrate) : fl_value_new_null()); - fl_value_append_take(values, self->audio_bitrate != nullptr ? fl_value_new_int(*self->audio_bitrate) : fl_value_new_null()); - fl_value_append_take(values, fl_value_new_bool(self->enable_audio)); - return values; -} - -static CameraLinuxPlatformMediaSettings* camera_linux_platform_media_settings_new_from_list(FlValue* values) { - FlValue* value0 = fl_value_get_list_value(values, 0); - CameraLinuxPlatformResolutionPreset resolution_preset = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); - FlValue* value1 = fl_value_get_list_value(values, 1); - int64_t* frames_per_second = nullptr; - int64_t frames_per_second_value; - if (fl_value_get_type(value1) != FL_VALUE_TYPE_NULL) { - frames_per_second_value = fl_value_get_int(value1); - frames_per_second = &frames_per_second_value; - } - FlValue* value2 = fl_value_get_list_value(values, 2); - int64_t* video_bitrate = nullptr; - int64_t video_bitrate_value; - if (fl_value_get_type(value2) != FL_VALUE_TYPE_NULL) { - video_bitrate_value = fl_value_get_int(value2); - video_bitrate = &video_bitrate_value; - } - FlValue* value3 = fl_value_get_list_value(values, 3); - int64_t* audio_bitrate = nullptr; - int64_t audio_bitrate_value; - if 
(fl_value_get_type(value3) != FL_VALUE_TYPE_NULL) { - audio_bitrate_value = fl_value_get_int(value3); - audio_bitrate = &audio_bitrate_value; - } - FlValue* value4 = fl_value_get_list_value(values, 4); - gboolean enable_audio = fl_value_get_bool(value4); - return camera_linux_platform_media_settings_new(resolution_preset, frames_per_second, video_bitrate, audio_bitrate, enable_audio); -} - struct _CameraLinuxPlatformPoint { GObject parent_instance; @@ -374,84 +201,58 @@ struct _CameraLinuxMessageCodec { G_DEFINE_TYPE(CameraLinuxMessageCodec, camera_linux_message_codec, fl_standard_message_codec_get_type()) -static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_lens_direction(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 129; - g_byte_array_append(buffer, &type, sizeof(uint8_t)); - return fl_standard_message_codec_write_value(codec, buffer, value, error); -} - static gboolean camera_linux_message_codec_write_camera_linux_platform_device_orientation(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 130; + uint8_t type = 129; g_byte_array_append(buffer, &type, sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_exposure_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 131; + uint8_t type = 130; g_byte_array_append(buffer, &type, sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_flash_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 132; + uint8_t type = 131; g_byte_array_append(buffer, &type, sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static 
gboolean camera_linux_message_codec_write_camera_linux_platform_focus_mode(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 133; + uint8_t type = 132; g_byte_array_append(buffer, &type, sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_image_file_format(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 134; + uint8_t type = 133; g_byte_array_append(buffer, &type, sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 135; - g_byte_array_append(buffer, &type, sizeof(uint8_t)); - return fl_standard_message_codec_write_value(codec, buffer, value, error); -} - -static gboolean camera_linux_message_codec_write_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 136; + uint8_t type = 134; g_byte_array_append(buffer, &type, sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_size(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformSize* value, GError** error) { - uint8_t type = 137; + uint8_t type = 135; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_size_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); } -static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_description(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformCameraDescription* value, GError** error) { - 
uint8_t type = 138; - g_byte_array_append(buffer, &type, sizeof(uint8_t)); - g_autoptr(FlValue) values = camera_linux_platform_camera_description_to_list(value); - return fl_standard_message_codec_write_value(codec, buffer, values, error); -} - static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformCameraState* value, GError** error) { - uint8_t type = 139; + uint8_t type = 136; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_camera_state_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); } -static gboolean camera_linux_message_codec_write_camera_linux_platform_media_settings(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformMediaSettings* value, GError** error) { - uint8_t type = 140; - g_byte_array_append(buffer, &type, sizeof(uint8_t)); - g_autoptr(FlValue) values = camera_linux_platform_media_settings_to_list(value); - return fl_standard_message_codec_write_value(codec, buffer, values, error); -} - static gboolean camera_linux_message_codec_write_camera_linux_platform_point(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformPoint* value, GError** error) { - uint8_t type = 141; + uint8_t type = 137; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_point_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); @@ -461,30 +262,22 @@ static gboolean camera_linux_message_codec_write_value(FlStandardMessageCodec* c if (fl_value_get_type(value) == FL_VALUE_TYPE_CUSTOM) { switch (fl_value_get_custom_type(value)) { case 129: - return camera_linux_message_codec_write_camera_linux_platform_camera_lens_direction(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 130: return 
camera_linux_message_codec_write_camera_linux_platform_device_orientation(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 131: + case 130: return camera_linux_message_codec_write_camera_linux_platform_exposure_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 132: + case 131: return camera_linux_message_codec_write_camera_linux_platform_flash_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 133: + case 132: return camera_linux_message_codec_write_camera_linux_platform_focus_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 134: + case 133: return camera_linux_message_codec_write_camera_linux_platform_image_file_format(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 135: + case 134: return camera_linux_message_codec_write_camera_linux_platform_image_format_group(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 136: - return camera_linux_message_codec_write_camera_linux_platform_resolution_preset(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 137: + case 135: return camera_linux_message_codec_write_camera_linux_platform_size(codec, buffer, CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value)), error); - case 138: - return camera_linux_message_codec_write_camera_linux_platform_camera_description(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_DESCRIPTION(fl_value_get_custom_value_object(value)), error); - case 139: + case 136: return camera_linux_message_codec_write_camera_linux_platform_camera_state(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_STATE(fl_value_get_custom_value_object(value)), error); - case 140: - return camera_linux_message_codec_write_camera_linux_platform_media_settings(codec, buffer, 
CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(fl_value_get_custom_value_object(value)), error); - case 141: + case 137: return camera_linux_message_codec_write_camera_linux_platform_point(codec, buffer, CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value)), error); } } @@ -492,36 +285,28 @@ static gboolean camera_linux_message_codec_write_value(FlStandardMessageCodec* c return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->write_value(codec, buffer, value, error); } -static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_lens_direction(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(129, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); -} - static FlValue* camera_linux_message_codec_read_camera_linux_platform_device_orientation(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(130, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + return fl_value_new_custom(129, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_exposure_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(131, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + return fl_value_new_custom(130, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_flash_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(132, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + 
return fl_value_new_custom(131, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_focus_mode(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(133, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + return fl_value_new_custom(132, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_file_format(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(134, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + return fl_value_new_custom(133, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(135, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); -} - -static FlValue* camera_linux_message_codec_read_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(136, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + return fl_value_new_custom(134, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_size(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -536,22 +321,7 @@ static FlValue* 
camera_linux_message_codec_read_camera_linux_platform_size(FlSta return nullptr; } - return fl_value_new_custom_object(137, G_OBJECT(value)); -} - -static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_description(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); - if (values == nullptr) { - return nullptr; - } - - g_autoptr(CameraLinuxPlatformCameraDescription) value = camera_linux_platform_camera_description_new_from_list(values); - if (value == nullptr) { - g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData"); - return nullptr; - } - - return fl_value_new_custom_object(138, G_OBJECT(value)); + return fl_value_new_custom_object(135, G_OBJECT(value)); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -566,22 +336,7 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_sta return nullptr; } - return fl_value_new_custom_object(139, G_OBJECT(value)); -} - -static FlValue* camera_linux_message_codec_read_camera_linux_platform_media_settings(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); - if (values == nullptr) { - return nullptr; - } - - g_autoptr(CameraLinuxPlatformMediaSettings) value = camera_linux_platform_media_settings_new_from_list(values); - if (value == nullptr) { - g_set_error(error, FL_MESSAGE_CODEC_ERROR, FL_MESSAGE_CODEC_ERROR_FAILED, "Invalid data received for MessageData"); - return nullptr; - } - - return fl_value_new_custom_object(140, G_OBJECT(value)); + return fl_value_new_custom_object(136, G_OBJECT(value)); } static FlValue* 
camera_linux_message_codec_read_camera_linux_platform_point(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -596,36 +351,28 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_point(FlSt return nullptr; } - return fl_value_new_custom_object(141, G_OBJECT(value)); + return fl_value_new_custom_object(137, G_OBJECT(value)); } static FlValue* camera_linux_message_codec_read_value_of_type(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, int type, GError** error) { switch (type) { case 129: - return camera_linux_message_codec_read_camera_linux_platform_camera_lens_direction(codec, buffer, offset, error); - case 130: return camera_linux_message_codec_read_camera_linux_platform_device_orientation(codec, buffer, offset, error); - case 131: + case 130: return camera_linux_message_codec_read_camera_linux_platform_exposure_mode(codec, buffer, offset, error); - case 132: + case 131: return camera_linux_message_codec_read_camera_linux_platform_flash_mode(codec, buffer, offset, error); - case 133: + case 132: return camera_linux_message_codec_read_camera_linux_platform_focus_mode(codec, buffer, offset, error); - case 134: + case 133: return camera_linux_message_codec_read_camera_linux_platform_image_file_format(codec, buffer, offset, error); - case 135: + case 134: return camera_linux_message_codec_read_camera_linux_platform_image_format_group(codec, buffer, offset, error); - case 136: - return camera_linux_message_codec_read_camera_linux_platform_resolution_preset(codec, buffer, offset, error); - case 137: + case 135: return camera_linux_message_codec_read_camera_linux_platform_size(codec, buffer, offset, error); - case 138: - return camera_linux_message_codec_read_camera_linux_platform_camera_description(codec, buffer, offset, error); - case 139: + case 136: return camera_linux_message_codec_read_camera_linux_platform_camera_state(codec, buffer, offset, error); - case 140: - return 
camera_linux_message_codec_read_camera_linux_platform_media_settings(codec, buffer, offset, error); - case 141: + case 137: return camera_linux_message_codec_read_camera_linux_platform_point(codec, buffer, offset, error); default: return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->read_value_of_type(codec, buffer, offset, type, error); @@ -675,38 +422,38 @@ static CameraLinuxCameraApiResponseHandle* camera_linux_camera_api_response_hand return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetAvailableCamerasResponse, camera_linux_camera_api_get_available_cameras_response, CAMERA_LINUX, CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE, GObject) +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetAvailableCamerasNamesResponse, camera_linux_camera_api_get_available_cameras_names_response, CAMERA_LINUX, CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE, GObject) -struct _CameraLinuxCameraApiGetAvailableCamerasResponse { +struct _CameraLinuxCameraApiGetAvailableCamerasNamesResponse { GObject parent_instance; FlValue* value; }; -G_DEFINE_TYPE(CameraLinuxCameraApiGetAvailableCamerasResponse, camera_linux_camera_api_get_available_cameras_response, G_TYPE_OBJECT) +G_DEFINE_TYPE(CameraLinuxCameraApiGetAvailableCamerasNamesResponse, camera_linux_camera_api_get_available_cameras_names_response, G_TYPE_OBJECT) -static void camera_linux_camera_api_get_available_cameras_response_dispose(GObject* object) { - CameraLinuxCameraApiGetAvailableCamerasResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE(object); +static void camera_linux_camera_api_get_available_cameras_names_response_dispose(GObject* object) { + CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE(object); g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_get_available_cameras_response_parent_class)->dispose(object); + 
G_OBJECT_CLASS(camera_linux_camera_api_get_available_cameras_names_response_parent_class)->dispose(object); } -static void camera_linux_camera_api_get_available_cameras_response_init(CameraLinuxCameraApiGetAvailableCamerasResponse* self) { +static void camera_linux_camera_api_get_available_cameras_names_response_init(CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self) { } -static void camera_linux_camera_api_get_available_cameras_response_class_init(CameraLinuxCameraApiGetAvailableCamerasResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_available_cameras_response_dispose; +static void camera_linux_camera_api_get_available_cameras_names_response_class_init(CameraLinuxCameraApiGetAvailableCamerasNamesResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_available_cameras_names_response_dispose; } -static CameraLinuxCameraApiGetAvailableCamerasResponse* camera_linux_camera_api_get_available_cameras_response_new(FlValue* return_value) { - CameraLinuxCameraApiGetAvailableCamerasResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_response_get_type(), nullptr)); +static CameraLinuxCameraApiGetAvailableCamerasNamesResponse* camera_linux_camera_api_get_available_cameras_names_response_new(FlValue* return_value) { + CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_names_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_ref(return_value)); return self; } -static CameraLinuxCameraApiGetAvailableCamerasResponse* camera_linux_camera_api_get_available_cameras_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiGetAvailableCamerasResponse* self = 
CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_response_get_type(), nullptr)); +static CameraLinuxCameraApiGetAvailableCamerasNamesResponse* camera_linux_camera_api_get_available_cameras_names_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiGetAvailableCamerasNamesResponse* self = CAMERA_LINUX_CAMERA_API_GET_AVAILABLE_CAMERAS_NAMES_RESPONSE(g_object_new(camera_linux_camera_api_get_available_cameras_names_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_new_string(code)); fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); @@ -870,6 +617,45 @@ static CameraLinuxCameraApiStopImageStreamResponse* camera_linux_camera_api_stop return self; } +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetTextureIdResponse, camera_linux_camera_api_get_texture_id_response, CAMERA_LINUX, CAMERA_API_GET_TEXTURE_ID_RESPONSE, GObject) + +struct _CameraLinuxCameraApiGetTextureIdResponse { + GObject parent_instance; + + FlValue* value; +}; + +G_DEFINE_TYPE(CameraLinuxCameraApiGetTextureIdResponse, camera_linux_camera_api_get_texture_id_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_get_texture_id_response_dispose(GObject* object) { + CameraLinuxCameraApiGetTextureIdResponse* self = CAMERA_LINUX_CAMERA_API_GET_TEXTURE_ID_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_get_texture_id_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_get_texture_id_response_init(CameraLinuxCameraApiGetTextureIdResponse* self) { +} + +static void camera_linux_camera_api_get_texture_id_response_class_init(CameraLinuxCameraApiGetTextureIdResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_texture_id_response_dispose; +} + +static 
CameraLinuxCameraApiGetTextureIdResponse* camera_linux_camera_api_get_texture_id_response_new(int64_t* return_value) { + CameraLinuxCameraApiGetTextureIdResponse* self = CAMERA_LINUX_CAMERA_API_GET_TEXTURE_ID_RESPONSE(g_object_new(camera_linux_camera_api_get_texture_id_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, return_value != nullptr ? fl_value_new_int(*return_value) : fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiGetTextureIdResponse* camera_linux_camera_api_get_texture_id_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiGetTextureIdResponse* self = CAMERA_LINUX_CAMERA_API_GET_TEXTURE_ID_RESPONSE(g_object_new(camera_linux_camera_api_get_texture_id_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiReceivedImageStreamDataResponse, camera_linux_camera_api_received_image_stream_data_response, CAMERA_LINUX, CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE, GObject) struct _CameraLinuxCameraApiReceivedImageStreamDataResponse { @@ -1918,15 +1704,15 @@ static CameraLinuxCameraApi* camera_linux_camera_api_new(const CameraLinuxCamera return self; } -static void camera_linux_camera_api_get_available_cameras_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { +static void camera_linux_camera_api_get_available_cameras_names_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->get_available_cameras == nullptr) { + if (self->vtable == nullptr || self->vtable->get_available_cameras_names == nullptr) { return; } g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->get_available_cameras(handle, self->user_data); + self->vtable->get_available_cameras_names(handle, self->user_data); } static void camera_linux_camera_api_create_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { @@ -1938,10 +1724,8 @@ static void camera_linux_camera_api_create_cb(FlBasicMessageChannel* channel, Fl FlValue* value0 = fl_value_get_list_value(message_, 0); const gchar* camera_name = fl_value_get_string(value0); - FlValue* value1 = fl_value_get_list_value(message_, 1); - CameraLinuxPlatformMediaSettings* settings = CAMERA_LINUX_PLATFORM_MEDIA_SETTINGS(fl_value_get_custom_value_object(value1)); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = 
camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->create(camera_name, settings, handle, self->user_data); + self->vtable->create(camera_name, handle, self->user_data); } static void camera_linux_camera_api_initialize_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { @@ -1981,6 +1765,19 @@ static void camera_linux_camera_api_stop_image_stream_cb(FlBasicMessageChannel* self->vtable->stop_image_stream(handle, self->user_data); } +static void camera_linux_camera_api_get_texture_id_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->get_texture_id == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->get_texture_id(camera_id, handle, self->user_data); +} + static void camera_linux_camera_api_received_image_stream_data_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); @@ -2298,9 +2095,9 @@ void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, c g_autoptr(CameraLinuxCameraApi) api_data = camera_linux_camera_api_new(vtable, user_data, user_data_free_func); g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); - g_autofree gchar* get_available_cameras_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCameras%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_available_cameras_channel = 
fl_basic_message_channel_new(messenger, get_available_cameras_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_available_cameras_channel, camera_linux_camera_api_get_available_cameras_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* get_available_cameras_names_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCamerasNames%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_available_cameras_names_channel = fl_basic_message_channel_new(messenger, get_available_cameras_names_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_available_cameras_names_channel, camera_linux_camera_api_get_available_cameras_names_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* create_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.create%s", dot_suffix); g_autoptr(FlBasicMessageChannel) create_channel = fl_basic_message_channel_new(messenger, create_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(create_channel, camera_linux_camera_api_create_cb, g_object_ref(api_data), g_object_unref); @@ -2313,6 +2110,9 @@ void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, c g_autofree gchar* stop_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream%s", dot_suffix); g_autoptr(FlBasicMessageChannel) stop_image_stream_channel = fl_basic_message_channel_new(messenger, stop_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(stop_image_stream_channel, camera_linux_camera_api_stop_image_stream_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* get_texture_id_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getTextureId%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_texture_id_channel = fl_basic_message_channel_new(messenger, 
get_texture_id_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_texture_id_channel, camera_linux_camera_api_get_texture_id_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* received_image_stream_data_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData%s", dot_suffix); g_autoptr(FlBasicMessageChannel) received_image_stream_data_channel = fl_basic_message_channel_new(messenger, received_image_stream_data_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(received_image_stream_data_channel, camera_linux_camera_api_received_image_stream_data_cb, g_object_ref(api_data), g_object_unref); @@ -2397,9 +2197,9 @@ void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, g_autofree gchar* dot_suffix = suffix != nullptr ? g_strdup_printf(".%s", suffix) : g_strdup(""); g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); - g_autofree gchar* get_available_cameras_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCameras%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_available_cameras_channel = fl_basic_message_channel_new(messenger, get_available_cameras_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_available_cameras_channel, nullptr, nullptr, nullptr); + g_autofree gchar* get_available_cameras_names_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getAvailableCamerasNames%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_available_cameras_names_channel = fl_basic_message_channel_new(messenger, get_available_cameras_names_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_available_cameras_names_channel, nullptr, nullptr, nullptr); g_autofree gchar* create_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.create%s", 
dot_suffix); g_autoptr(FlBasicMessageChannel) create_channel = fl_basic_message_channel_new(messenger, create_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(create_channel, nullptr, nullptr, nullptr); @@ -2412,6 +2212,9 @@ void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, g_autofree gchar* stop_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream%s", dot_suffix); g_autoptr(FlBasicMessageChannel) stop_image_stream_channel = fl_basic_message_channel_new(messenger, stop_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(stop_image_stream_channel, nullptr, nullptr, nullptr); + g_autofree gchar* get_texture_id_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getTextureId%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) get_texture_id_channel = fl_basic_message_channel_new(messenger, get_texture_id_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(get_texture_id_channel, nullptr, nullptr, nullptr); g_autofree gchar* received_image_stream_data_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData%s", dot_suffix); g_autoptr(FlBasicMessageChannel) received_image_stream_data_channel = fl_basic_message_channel_new(messenger, received_image_stream_data_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(received_image_stream_data_channel, nullptr, nullptr, nullptr); @@ -2492,19 +2295,19 @@ void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, fl_basic_message_channel_set_message_handler(set_image_file_format_channel, nullptr, nullptr, nullptr); } -void camera_linux_camera_api_respond_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value) { - g_autoptr(CameraLinuxCameraApiGetAvailableCamerasResponse) 
response = camera_linux_camera_api_get_available_cameras_response_new(return_value); +void camera_linux_camera_api_respond_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value) { + g_autoptr(CameraLinuxCameraApiGetAvailableCamerasNamesResponse) response = camera_linux_camera_api_get_available_cameras_names_response_new(return_value); g_autoptr(GError) error = nullptr; if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCameras", error->message); + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCamerasNames", error->message); } } -void camera_linux_camera_api_respond_error_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiGetAvailableCamerasResponse) response = camera_linux_camera_api_get_available_cameras_response_new_error(code, message, details); +void camera_linux_camera_api_respond_error_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetAvailableCamerasNamesResponse) response = camera_linux_camera_api_get_available_cameras_names_response_new_error(code, message, details); g_autoptr(GError) error = nullptr; if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCameras", error->message); + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getAvailableCamerasNames", error->message); } } @@ -2572,6 +2375,22 @@ void camera_linux_camera_api_respond_error_stop_image_stream(CameraLinuxCameraAp } } +void 
camera_linux_camera_api_respond_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, int64_t* return_value) { + g_autoptr(CameraLinuxCameraApiGetTextureIdResponse) response = camera_linux_camera_api_get_texture_id_response_new(return_value); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getTextureId", error->message); + } +} + +void camera_linux_camera_api_respond_error_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiGetTextureIdResponse) response = camera_linux_camera_api_get_texture_id_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getTextureId", error->message); + } +} + void camera_linux_camera_api_respond_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle) { g_autoptr(CameraLinuxCameraApiReceivedImageStreamDataResponse) response = camera_linux_camera_api_received_image_stream_data_response_new(); g_autoptr(GError) error = nullptr; @@ -2988,115 +2807,6 @@ void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCame } } -struct _CameraLinuxCameraGlobalEventApi { - GObject parent_instance; - - FlBinaryMessenger* messenger; - gchar *suffix; -}; - -G_DEFINE_TYPE(CameraLinuxCameraGlobalEventApi, camera_linux_camera_global_event_api, G_TYPE_OBJECT) - -static void camera_linux_camera_global_event_api_dispose(GObject* object) { - CameraLinuxCameraGlobalEventApi* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API(object); - g_clear_object(&self->messenger); - g_clear_pointer(&self->suffix, g_free); 
- G_OBJECT_CLASS(camera_linux_camera_global_event_api_parent_class)->dispose(object); -} - -static void camera_linux_camera_global_event_api_init(CameraLinuxCameraGlobalEventApi* self) { -} - -static void camera_linux_camera_global_event_api_class_init(CameraLinuxCameraGlobalEventApiClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_global_event_api_dispose; -} - -CameraLinuxCameraGlobalEventApi* camera_linux_camera_global_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix) { - CameraLinuxCameraGlobalEventApi* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API(g_object_new(camera_linux_camera_global_event_api_get_type(), nullptr)); - self->messenger = FL_BINARY_MESSENGER(g_object_ref(messenger)); - self->suffix = suffix != nullptr ? g_strdup_printf(".%s", suffix) : g_strdup(""); - return self; -} - -struct _CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse { - GObject parent_instance; - - FlValue* error; -}; - -G_DEFINE_TYPE(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse, camera_linux_camera_global_event_api_device_orientation_changed_response, G_TYPE_OBJECT) - -static void camera_linux_camera_global_event_api_device_orientation_changed_response_dispose(GObject* object) { - CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(object); - g_clear_pointer(&self->error, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_global_event_api_device_orientation_changed_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_global_event_api_device_orientation_changed_response_init(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { -} - -static void camera_linux_camera_global_event_api_device_orientation_changed_response_class_init(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = 
camera_linux_camera_global_event_api_device_orientation_changed_response_dispose; -} - -static CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* camera_linux_camera_global_event_api_device_orientation_changed_response_new(FlValue* response) { - CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self = CAMERA_LINUX_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(g_object_new(camera_linux_camera_global_event_api_device_orientation_changed_response_get_type(), nullptr)); - if (fl_value_get_length(response) > 1) { - self->error = fl_value_ref(response); - } - return self; -} - -gboolean camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), FALSE); - return self->error != nullptr; -} - -const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_code(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), nullptr); - g_assert(camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(self)); - return fl_value_get_string(fl_value_get_list_value(self->error, 0)); -} - -const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_message(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), nullptr); - g_assert(camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(self)); - return fl_value_get_string(fl_value_get_list_value(self->error, 1)); -} - -FlValue* 
camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_details(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* self) { - g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE(self), nullptr); - g_assert(camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(self)); - return fl_value_get_list_value(self->error, 2); -} - -static void camera_linux_camera_global_event_api_device_orientation_changed_cb(GObject* object, GAsyncResult* result, gpointer user_data) { - GTask* task = G_TASK(user_data); - g_task_return_pointer(task, result, g_object_unref); -} - -void camera_linux_camera_global_event_api_device_orientation_changed(CameraLinuxCameraGlobalEventApi* self, CameraLinuxPlatformDeviceOrientation orientation, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { - g_autoptr(FlValue) args = fl_value_new_list(); - fl_value_append_take(args, fl_value_new_custom(130, fl_value_new_int(orientation), (GDestroyNotify)fl_value_unref)); - g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraGlobalEventApi.deviceOrientationChanged%s", self->suffix); - g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); - FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); - GTask* task = g_task_new(self, cancellable, callback, user_data); - g_task_set_task_data(task, channel, g_object_unref); - fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_global_event_api_device_orientation_changed_cb, task); -} - -CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* camera_linux_camera_global_event_api_device_orientation_changed_finish(CameraLinuxCameraGlobalEventApi* self, GAsyncResult* result, GError** error) { - g_autoptr(GTask) task = G_TASK(result); - GAsyncResult* r = 
G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); - FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); - g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); - if (response == nullptr) { - return nullptr; - } - return camera_linux_camera_global_event_api_device_orientation_changed_response_new(response); -} - struct _CameraLinuxCameraEventApi { GObject parent_instance; @@ -3186,7 +2896,7 @@ static void camera_linux_camera_event_api_initialized_cb(GObject* object, GAsync void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* self, CameraLinuxPlatformCameraState* initial_state, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { g_autoptr(FlValue) args = fl_value_new_list(); - fl_value_append_take(args, fl_value_new_custom_object(139, G_OBJECT(initial_state))); + fl_value_append_take(args, fl_value_new_custom_object(136, G_OBJECT(initial_state))); g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.initialized%s", self->suffix); g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); diff --git a/packages/camera/camera_linux/linux/messages.g.h b/packages/camera/camera_linux/linux/messages.g.h index 0b30fca0b352..1bc7c5a9c0bd 100644 --- a/packages/camera/camera_linux/linux/messages.g.h +++ b/packages/camera/camera_linux/linux/messages.g.h @@ -11,22 +11,6 @@ G_BEGIN_DECLS -/** - * CameraLinuxPlatformCameraLensDirection: - * CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_FRONT: - * Front facing camera (a user looking at the screen is seen by the camera). - * CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_BACK: - * Back facing camera (a user looking at the screen is not seen by the camera). 
- * CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_EXTERNAL: - * External camera which may not be mounted to the device. - * - */ -typedef enum { - CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_FRONT = 0, - CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_BACK = 1, - CAMERA_LINUX_PLATFORM_CAMERA_LENS_DIRECTION_EXTERNAL = 2 -} CameraLinuxPlatformCameraLensDirection; - /** * CameraLinuxPlatformDeviceOrientation: * CAMERA_LINUX_PLATFORM_DEVICE_ORIENTATION_PORTRAIT_UP: @@ -102,25 +86,6 @@ typedef enum { CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_YUV420 = 1 } CameraLinuxPlatformImageFormatGroup; -/** - * CameraLinuxPlatformResolutionPreset: - * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW: - * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM: - * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH: - * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH: - * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH: - * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX: - * - */ -typedef enum { - CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW = 0, - CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM = 1, - CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH = 2, - CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH = 3, - CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH = 4, - CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX = 5 -} CameraLinuxPlatformResolutionPreset; - /** * CameraLinuxPlatformSize: * @@ -159,44 +124,6 @@ double camera_linux_platform_size_get_width(CameraLinuxPlatformSize* object); */ double camera_linux_platform_size_get_height(CameraLinuxPlatformSize* object); -/** - * CameraLinuxPlatformCameraDescription: - * - */ - -G_DECLARE_FINAL_TYPE(CameraLinuxPlatformCameraDescription, camera_linux_platform_camera_description, CAMERA_LINUX, PLATFORM_CAMERA_DESCRIPTION, GObject) - -/** - * camera_linux_platform_camera_description_new: - * name: field in this object. - * lens_direction: field in this object. - * - * Creates a new #PlatformCameraDescription object. 
- * - * Returns: a new #CameraLinuxPlatformCameraDescription - */ -CameraLinuxPlatformCameraDescription* camera_linux_platform_camera_description_new(const gchar* name, CameraLinuxPlatformCameraLensDirection lens_direction); - -/** - * camera_linux_platform_camera_description_get_name - * @object: a #CameraLinuxPlatformCameraDescription. - * - * The name of the camera device. - * - * Returns: the field value. - */ -const gchar* camera_linux_platform_camera_description_get_name(CameraLinuxPlatformCameraDescription* object); - -/** - * camera_linux_platform_camera_description_get_lens_direction - * @object: a #CameraLinuxPlatformCameraDescription. - * - * The direction the camera is facing. - * - * Returns: the field value. - */ -CameraLinuxPlatformCameraLensDirection camera_linux_platform_camera_description_get_lens_direction(CameraLinuxPlatformCameraDescription* object); - /** * CameraLinuxPlatformCameraState: * @@ -268,77 +195,6 @@ gboolean camera_linux_platform_camera_state_get_exposure_point_supported(CameraL */ gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinuxPlatformCameraState* object); -/** - * CameraLinuxPlatformMediaSettings: - * - */ - -G_DECLARE_FINAL_TYPE(CameraLinuxPlatformMediaSettings, camera_linux_platform_media_settings, CAMERA_LINUX, PLATFORM_MEDIA_SETTINGS, GObject) - -/** - * camera_linux_platform_media_settings_new: - * resolution_preset: field in this object. - * frames_per_second: field in this object. - * video_bitrate: field in this object. - * audio_bitrate: field in this object. - * enable_audio: field in this object. - * - * Creates a new #PlatformMediaSettings object. 
- * - * Returns: a new #CameraLinuxPlatformMediaSettings - */ -CameraLinuxPlatformMediaSettings* camera_linux_platform_media_settings_new(CameraLinuxPlatformResolutionPreset resolution_preset, int64_t* frames_per_second, int64_t* video_bitrate, int64_t* audio_bitrate, gboolean enable_audio); - -/** - * camera_linux_platform_media_settings_get_resolution_preset - * @object: a #CameraLinuxPlatformMediaSettings. - * - * Gets the value of the resolutionPreset field of @object. - * - * Returns: the field value. - */ -CameraLinuxPlatformResolutionPreset camera_linux_platform_media_settings_get_resolution_preset(CameraLinuxPlatformMediaSettings* object); - -/** - * camera_linux_platform_media_settings_get_frames_per_second - * @object: a #CameraLinuxPlatformMediaSettings. - * - * Gets the value of the framesPerSecond field of @object. - * - * Returns: the field value. - */ -int64_t* camera_linux_platform_media_settings_get_frames_per_second(CameraLinuxPlatformMediaSettings* object); - -/** - * camera_linux_platform_media_settings_get_video_bitrate - * @object: a #CameraLinuxPlatformMediaSettings. - * - * Gets the value of the videoBitrate field of @object. - * - * Returns: the field value. - */ -int64_t* camera_linux_platform_media_settings_get_video_bitrate(CameraLinuxPlatformMediaSettings* object); - -/** - * camera_linux_platform_media_settings_get_audio_bitrate - * @object: a #CameraLinuxPlatformMediaSettings. - * - * Gets the value of the audioBitrate field of @object. - * - * Returns: the field value. - */ -int64_t* camera_linux_platform_media_settings_get_audio_bitrate(CameraLinuxPlatformMediaSettings* object); - -/** - * camera_linux_platform_media_settings_get_enable_audio - * @object: a #CameraLinuxPlatformMediaSettings. - * - * Gets the value of the enableAudio field of @object. - * - * Returns: the field value. 
- */ -gboolean camera_linux_platform_media_settings_get_enable_audio(CameraLinuxPlatformMediaSettings* object); - /** * CameraLinuxPlatformPoint: * @@ -389,11 +245,12 @@ G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResponseHandle, camera_linux_camera_api * Table of functions exposed by CameraApi to be implemented by the API provider. */ typedef struct { - void (*get_available_cameras)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*create)(const gchar* camera_name, CameraLinuxPlatformMediaSettings* settings, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*get_available_cameras_names)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*create)(const gchar* camera_name, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*initialize)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*start_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*stop_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*get_texture_id)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*received_image_stream_data)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*dispose)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*lock_capture_orientation)(CameraLinuxPlatformDeviceOrientation orientation, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); @@ -446,24 +303,24 @@ void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, c void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix); /** - * camera_linux_camera_api_respond_get_available_cameras: + * 
camera_linux_camera_api_respond_get_available_cameras_names: * @response_handle: a #CameraLinuxCameraApiResponseHandle. * @return_value: location to write the value returned by this method. * - * Responds to CameraApi.getAvailableCameras. + * Responds to CameraApi.getAvailableCamerasNames. */ -void camera_linux_camera_api_respond_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value); +void camera_linux_camera_api_respond_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value); /** - * camera_linux_camera_api_respond_error_get_available_cameras: + * camera_linux_camera_api_respond_error_get_available_cameras_names: * @response_handle: a #CameraLinuxCameraApiResponseHandle. * @code: error code. * @message: error message. * @details: (allow-none): error details or %NULL. * - * Responds with an error to CameraApi.getAvailableCameras. + * Responds with an error to CameraApi.getAvailableCamerasNames. */ -void camera_linux_camera_api_respond_error_get_available_cameras(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); +void camera_linux_camera_api_respond_error_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); /** * camera_linux_camera_api_respond_create: @@ -542,6 +399,26 @@ void camera_linux_camera_api_respond_stop_image_stream(CameraLinuxCameraApiRespo */ void camera_linux_camera_api_respond_error_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); +/** + * camera_linux_camera_api_respond_get_texture_id: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @return_value: location to write the value returned by this method. + * + * Responds to CameraApi.getTextureId. 
+ */ +void camera_linux_camera_api_respond_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, int64_t* return_value); + +/** + * camera_linux_camera_api_respond_error_get_texture_id: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.getTextureId. + */ +void camera_linux_camera_api_respond_error_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + /** * camera_linux_camera_api_respond_received_image_stream_data: * @response_handle: a #CameraLinuxCameraApiResponseHandle. @@ -1042,91 +919,6 @@ void camera_linux_camera_api_respond_set_image_file_format(CameraLinuxCameraApiR */ void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -G_DECLARE_FINAL_TYPE(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse, camera_linux_camera_global_event_api_device_orientation_changed_response, CAMERA_LINUX, CAMERA_GLOBAL_EVENT_API_DEVICE_ORIENTATION_CHANGED_RESPONSE, GObject) - -/** - * camera_linux_camera_global_event_api_device_orientation_changed_response_is_error: - * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. - * - * Checks if a response to CameraGlobalEventApi.deviceOrientationChanged is an error. - * - * Returns: a %TRUE if this response is an error. - */ -gboolean camera_linux_camera_global_event_api_device_orientation_changed_response_is_error(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); - -/** - * camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_code: - * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. - * - * Get the error code for this response. 
- * - * Returns: an error code or %NULL if not an error. - */ -const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_code(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); - -/** - * camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_message: - * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. - * - * Get the error message for this response. - * - * Returns: an error message. - */ -const gchar* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_message(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); - -/** - * camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_details: - * @response: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse. - * - * Get the error details for this response. - * - * Returns: (allow-none): an error details or %NULL. - */ -FlValue* camera_linux_camera_global_event_api_device_orientation_changed_response_get_error_details(CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* response); - -/** - * CameraLinuxCameraGlobalEventApi: - * - * Handler for native callbacks that are not tied to a specific camera ID. - */ - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraGlobalEventApi, camera_linux_camera_global_event_api, CAMERA_LINUX, CAMERA_GLOBAL_EVENT_API, GObject) - -/** - * camera_linux_camera_global_event_api_new: - * @messenger: an #FlBinaryMessenger. - * @suffix: (allow-none): a suffix to add to the API or %NULL for none. - * - * Creates a new object to access the CameraGlobalEventApi API. - * - * Returns: a new #CameraLinuxCameraGlobalEventApi - */ -CameraLinuxCameraGlobalEventApi* camera_linux_camera_global_event_api_new(FlBinaryMessenger* messenger, const gchar* suffix); - -/** - * camera_linux_camera_global_event_api_device_orientation_changed: - * @api: a #CameraLinuxCameraGlobalEventApi. 
- * @orientation: parameter for this method. - * @cancellable: (allow-none): a #GCancellable or %NULL. - * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response. - * @user_data: (closure): user data to pass to @callback. - * - * Called when the device's physical orientation changes. - */ -void camera_linux_camera_global_event_api_device_orientation_changed(CameraLinuxCameraGlobalEventApi* api, CameraLinuxPlatformDeviceOrientation orientation, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data); - -/** - * camera_linux_camera_global_event_api_device_orientation_changed_finish: - * @api: a #CameraLinuxCameraGlobalEventApi. - * @result: a #GAsyncResult. - * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore. - * - * Completes a camera_linux_camera_global_event_api_device_orientation_changed() call. - * - * Returns: a #CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse or %NULL on error. - */ -CameraLinuxCameraGlobalEventApiDeviceOrientationChangedResponse* camera_linux_camera_global_event_api_device_orientation_changed_finish(CameraLinuxCameraGlobalEventApi* api, GAsyncResult* result, GError** error); - G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiInitializedResponse, camera_linux_camera_event_api_initialized_response, CAMERA_LINUX, CAMERA_EVENT_API_INITIALIZED_RESPONSE, GObject) /** diff --git a/packages/camera/camera_linux/pigeons/messages.dart b/packages/camera/camera_linux/pigeons/messages.dart index 005962b7b156..d84d05b4ccb5 100644 --- a/packages/camera/camera_linux/pigeons/messages.dart +++ b/packages/camera/camera_linux/pigeons/messages.dart @@ -12,18 +12,6 @@ import 'package:pigeon/pigeon.dart'; copyrightHeader: 'pigeons/copyright.txt', )) -// Pigeon version of CameraLensDirection. -enum PlatformCameraLensDirection { - /// Front facing camera (a user looking at the screen is seen by the camera). 
- front, - - /// Back facing camera (a user looking at the screen is not seen by the camera). - back, - - /// External camera which may not be mounted to the device. - external, -} - // Pigeon equivalent of CGSize. class PlatformSize { PlatformSize({required this.width, required this.height}); @@ -72,30 +60,6 @@ enum PlatformImageFormatGroup { yuv420, } -// Pigeon version of ResolutionPreset. -enum PlatformResolutionPreset { - low, - medium, - high, - veryHigh, - ultraHigh, - max, -} - -// Pigeon version of CameraDescription. -class PlatformCameraDescription { - PlatformCameraDescription({ - required this.name, - required this.lensDirection, - }); - - /// The name of the camera device. - final String name; - - /// The direction the camera is facing. - final PlatformCameraLensDirection lensDirection; -} - // Pigeon version of the data needed for a CameraInitializedEvent. class PlatformCameraState { PlatformCameraState({ @@ -122,23 +86,6 @@ class PlatformCameraState { final bool focusPointSupported; } -// Pigeon version of to MediaSettings. -class PlatformMediaSettings { - PlatformMediaSettings({ - required this.resolutionPreset, - required this.framesPerSecond, - required this.videoBitrate, - required this.audioBitrate, - required this.enableAudio, - }); - - final PlatformResolutionPreset resolutionPreset; - final int? framesPerSecond; - final int? videoBitrate; - final int? audioBitrate; - final bool enableAudio; -} - // Pigeon equivalent of CGPoint. class PlatformPoint { PlatformPoint({required this.x, required this.y}); @@ -150,15 +97,12 @@ class PlatformPoint { @HostApi() abstract class CameraApi { /// Returns the list of available cameras. - // TODO(stuartmorgan): Make the generic type non-nullable once supported. - // https://github.com/flutter/flutter/issues/97848 - // The consuming code treats it as non-nullable. @async - List getAvailableCameras(); + List getAvailableCamerasNames(); /// Create a new camera with the given settings, and returns its ID. 
@async - int create(String cameraName, PlatformMediaSettings settings); + int create(String cameraName); /// Initializes the camera with the given ID. @async @@ -172,6 +116,10 @@ abstract class CameraApi { @async void stopImageStream(); + /// Get the texture ID for the camera with the given ID. + @async + int? getTextureId(int cameraId); + /// Called by the Dart side of the plugin when it has received the last image /// frame sent. /// @@ -293,13 +241,6 @@ abstract class CameraApi { void setImageFileFormat(PlatformImageFileFormat format); } -/// Handler for native callbacks that are not tied to a specific camera ID. -@FlutterApi() -abstract class CameraGlobalEventApi { - /// Called when the device's physical orientation changes. - void deviceOrientationChanged(PlatformDeviceOrientation orientation); -} - /// Handler for native callbacks that are tied to a specific camera ID. /// /// This is intended to be initialized with the camera ID as a suffix. diff --git a/packages/camera/camera_linux/pubspec.yaml b/packages/camera/camera_linux/pubspec.yaml index 896f0a19fb89..043bc9e2d79e 100644 --- a/packages/camera/camera_linux/pubspec.yaml +++ b/packages/camera/camera_linux/pubspec.yaml @@ -11,6 +11,7 @@ dependencies: sdk: flutter plugin_platform_interface: ^2.0.2 camera_platform_interface: ^2.7.0 + stream_transform: ^2.1.1 dev_dependencies: flutter_test: @@ -39,6 +40,9 @@ flutter: pluginClass: CameraPlugin dartPluginClass: CameraLinux + assets: + - deps/pylon-8.0.2.16314_linux-aarch64_setup.tar.gz + # To add assets to your plugin package, add an assets section, like this: # assets: # - images/a_dot_burr.jpeg From 2ce2112ac7d38c7a72f7287c599720513a7f1af7 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Tue, 3 Jun 2025 20:46:03 -0400 Subject: [PATCH 06/21] Update CMakeLists.txt to improve Pylon SDK handling and add missing archive path --- packages/camera/camera_linux/.gitignore | 2 ++ .../camera/camera_linux/linux/CMakeLists.txt | 23 +++++++++++++++---- 2 files 
changed, 21 insertions(+), 4 deletions(-) diff --git a/packages/camera/camera_linux/.gitignore b/packages/camera/camera_linux/.gitignore index ac5aa9893e48..33b7f6366ad1 100644 --- a/packages/camera/camera_linux/.gitignore +++ b/packages/camera/camera_linux/.gitignore @@ -1,3 +1,5 @@ +deps/ + # Miscellaneous *.class *.log diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index 265647b93354..5c18323991da 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -5,17 +5,32 @@ cmake_minimum_required(VERSION 3.10) set(CMAKE_CXX_STANDARD 17) -# Path to SDK tar.gz -set(PYLON_ARCHIVE ${CMAKE_CURRENT_SOURCE_DIR}/../deps/pylon-8.0.2.16314_linux-aarch64.tar.gz) +# Set variables +set(PYLON_VERSION "8.0.2.16314") +set(PYLON_ARCHIVE_NAME "pylon-${PYLON_VERSION}_linux-aarch64.tar.gz") +set(PYLON_ARCHIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/../deps/${PYLON_ARCHIVE_NAME}) +set(PYLON_DOWNLOAD_URL "https://github.com/LightX-Innovations/flutter_packages/releases/download/camera_linux_v0.1/pylon-${PYLON_VERSION}_linux-aarch64.tar.gz") set(PYLON_ROOT ${CMAKE_BINARY_DIR}/pylon-sdk) +# Download the archive if it does not exist +if(NOT EXISTS "${PYLON_ARCHIVE_PATH}") + message(STATUS "Downloading Pylon SDK archive...") + file(DOWNLOAD + "${PYLON_DOWNLOAD_URL}" + "${PYLON_ARCHIVE_PATH}" + SHOW_PROGRESS + EXPECTED_MD5 "" # Optionally add checksum here + STATUS DOWNLOAD_STATUS + ) + list(GET DOWNLOAD_STATUS 0 DOWNLOAD_RESULT) +endif() + # Extract the archive only if not already extracted if(NOT EXISTS "${PYLON_ROOT}") message(STATUS "Extracting Pylon SDK to ${PYLON_ROOT}...") - message(STATUS "Command: ${CMAKE_COMMAND} -E tar -xzf ${PYLON_ARCHIVE}") file(MAKE_DIRECTORY "${PYLON_ROOT}") execute_process( - COMMAND ${CMAKE_COMMAND} -E tar -xzf "${PYLON_ARCHIVE}" + COMMAND ${CMAKE_COMMAND} -E tar -xzf "${PYLON_ARCHIVE_PATH}" WORKING_DIRECTORY "${PYLON_ROOT}" ) endif() 
From b034b7ac3bfa115d43f8d0a41fa99bc0577b3a3e Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Tue, 3 Jun 2025 20:51:53 -0400 Subject: [PATCH 07/21] Update pubspec.yaml: Set publish_to to none and specify git dependencies for camera_avfoundation and camera_linux --- packages/camera/camera/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 6401344cec70..885011a5c256 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: git: url: git@github.com:LightX-Innovations/flutter_packages.git path: packages/camera/camera_linux - ref: main + ref: camera_0.1 camera_web: ^0.3.3 flutter: sdk: flutter From 6ca0593760a6a0dac1eca789ac9195790b473c88 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Tue, 3 Jun 2025 20:55:11 -0400 Subject: [PATCH 08/21] Update pubspec.yaml and CMakeLists.txt: Bump camera dependencies to version 0.2 and remove optional MD5 checksum --- packages/camera/camera/pubspec.yaml | 2 +- packages/camera/camera_linux/linux/CMakeLists.txt | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 885011a5c256..65c39f1d659c 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: git: url: git@github.com:LightX-Innovations/flutter_packages.git path: packages/camera/camera_linux - ref: camera_0.1 + ref: camera_0.2 camera_web: ^0.3.3 flutter: sdk: flutter diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index 5c18323991da..8175a4e11adc 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -19,7 +19,6 @@ if(NOT EXISTS "${PYLON_ARCHIVE_PATH}") "${PYLON_DOWNLOAD_URL}" "${PYLON_ARCHIVE_PATH}" 
SHOW_PROGRESS - EXPECTED_MD5 "" # Optionally add checksum here STATUS DOWNLOAD_STATUS ) list(GET DOWNLOAD_STATUS 0 DOWNLOAD_RESULT) From cb0d97ebc8ebccd80522b0e6ecaac6f5dc12abd5 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Tue, 3 Jun 2025 21:12:58 -0400 Subject: [PATCH 09/21] Update pubspec.yaml and linux_camera.dart: Change camera_linux dependency ref to camera_0.3 and format code for readability --- packages/camera/camera/pubspec.yaml | 2 +- .../camera_linux/lib/src/linux_camera.dart | 29 ++++++++++++------- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 65c39f1d659c..7bedf1cb8e5d 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: git: url: git@github.com:LightX-Innovations/flutter_packages.git path: packages/camera/camera_linux - ref: camera_0.2 + ref: camera_0.3 camera_web: ^0.3.3 flutter: sdk: flutter diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart index 8cdd7fcde5b9..3a99ec3eed2b 100644 --- a/packages/camera/camera_linux/lib/src/linux_camera.dart +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -10,7 +10,8 @@ import 'package:stream_transform/stream_transform.dart'; class CameraLinux extends CameraPlatform { final CameraApi _hostApi; - CameraLinux({@visibleForTesting CameraApi? api}) : _hostApi = api ?? CameraApi(); + CameraLinux({@visibleForTesting CameraApi? api}) + : _hostApi = api ?? CameraApi(); static void registerWith() { CameraPlatform.instance = CameraLinux(); @@ -24,15 +25,18 @@ class CameraLinux extends CameraPlatform { /// This is only exposed for test purposes. It shouldn't be used by clients of /// the plugin as it may break or change at any time. 
@visibleForTesting - final StreamController cameraEventStreamController = StreamController.broadcast(); + final StreamController cameraEventStreamController = + StreamController.broadcast(); /// The per-camera handlers for messages that should be rebroadcast to /// clients as [CameraEvent]s. @visibleForTesting - final Map hostCameraHandlers = {}; + final Map hostCameraHandlers = + {}; Stream _cameraEvents(int cameraId) => - cameraEventStreamController.stream.where((CameraEvent event) => event.cameraId == cameraId); + cameraEventStreamController.stream + .where((CameraEvent event) => event.cameraId == cameraId); @override Future> availableCameras() async { @@ -44,7 +48,6 @@ class CameraLinux extends CameraPlatform { name: name, lensDirection: CameraLensDirection.back, sensorOrientation: 0, - lensType: CameraLensType.unknown, ); }, ).toList(); @@ -74,7 +77,8 @@ class CameraLinux extends CameraPlatform { int cameraId, { ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown, }) async { - hostCameraHandlers.putIfAbsent(cameraId, () => HostCameraMessageHandler(cameraId, cameraEventStreamController)); + hostCameraHandlers.putIfAbsent(cameraId, + () => HostCameraMessageHandler(cameraId, cameraEventStreamController)); final Completer completer = Completer(); unawaited( @@ -128,7 +132,8 @@ class CameraLinux extends CameraPlatform { /// The following methods are not implemented for Linux, as they are not /// supported by the underlying camera API. @override - Future lockCaptureOrientation(int cameraId, DeviceOrientation orientation) { + Future lockCaptureOrientation( + int cameraId, DeviceOrientation orientation) { return Future.value(); } @@ -152,7 +157,9 @@ class CameraLinux extends CameraPlatform { @override Future startVideoRecording( int cameraId, { - @Deprecated('This parameter is unused, and will be ignored on all platforms') Duration? maxVideoDuration, + @Deprecated( + 'This parameter is unused, and will be ignored on all platforms') + Duration? 
maxVideoDuration, }) { throw UnimplementedError('startVideoRecording() is not implemented.'); } @@ -227,14 +234,16 @@ class CameraLinux extends CameraPlatform { Future resumePreview(int cameraId) async {} @override - Future setDescriptionWhileRecording(CameraDescription description) async {} + Future setDescriptionWhileRecording( + CameraDescription description) async {} @override Widget buildPreview(int cameraId) { return FutureBuilder( future: _hostApi.getTextureId(cameraId), builder: (context, snapshot) { - if (snapshot.connectionState == ConnectionState.done && snapshot.data != null) { + if (snapshot.connectionState == ConnectionState.done && + snapshot.data != null) { print('Texture ID from dart: ${snapshot.data}'); return RepaintBoundary( child: Texture( From 99cfd5ff2437f2a8c1e8ea2abe3654c479480fca Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Wed, 4 Jun 2025 13:49:12 -0400 Subject: [PATCH 10/21] clean upo --- .../camera_linux/lib/src/linux_camera.dart | 28 ++++- .../camera_linux/lib/src/messages.g.dart | 29 +++-- .../camera/camera_linux/linux/CMakeLists.txt | 1 + packages/camera/camera_linux/linux/camera.cpp | 117 ++++++++++++++++++ packages/camera/camera_linux/linux/camera.h | 53 ++++++++ .../camera_linux/linux/camera_host_plugin.cpp | 114 ++++------------- .../camera_linux/linux/camera_host_plugin.h | 11 +- .../camera_texture_image_event_handler.cpp | 45 ++----- .../camera_texture_image_event_handler.h | 5 +- .../camera/camera_linux/linux/messages.g.cc | 42 +++++-- .../camera/camera_linux/linux/messages.g.h | 21 +++- .../camera/camera_linux/pigeons/messages.dart | 11 +- 12 files changed, 324 insertions(+), 153 deletions(-) create mode 100644 packages/camera/camera_linux/linux/camera.cpp create mode 100644 packages/camera/camera_linux/linux/camera.h diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart index 3a99ec3eed2b..33c782387b49 100644 --- 
a/packages/camera/camera_linux/lib/src/linux_camera.dart +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -65,7 +65,32 @@ class CameraLinux extends CameraPlatform { bool enableAudio = false, }) async { try { - final cameraId = await _hostApi.create(cameraDescription.name); + PlatformResolutionPreset pigeonResolutionPreset = + PlatformResolutionPreset.veryHigh; + + if (resolutionPreset != null) { + switch (resolutionPreset) { + case ResolutionPreset.low: + pigeonResolutionPreset = PlatformResolutionPreset.low; + break; + case ResolutionPreset.medium: + pigeonResolutionPreset = PlatformResolutionPreset.medium; + break; + case ResolutionPreset.high: + pigeonResolutionPreset = PlatformResolutionPreset.high; + break; + case ResolutionPreset.veryHigh: + pigeonResolutionPreset = PlatformResolutionPreset.veryHigh; + break; + case ResolutionPreset.ultraHigh: + pigeonResolutionPreset = PlatformResolutionPreset.ultraHigh; + break; + case ResolutionPreset.max: + pigeonResolutionPreset = PlatformResolutionPreset.max; + } + } + final cameraId = + await _hostApi.create(cameraDescription.name, pigeonResolutionPreset); return cameraId; } on PlatformException catch (e) { throw CameraException(e.code, e.message); @@ -244,7 +269,6 @@ class CameraLinux extends CameraPlatform { builder: (context, snapshot) { if (snapshot.connectionState == ConnectionState.done && snapshot.data != null) { - print('Texture ID from dart: ${snapshot.data}'); return RepaintBoundary( child: Texture( textureId: snapshot.data!, diff --git a/packages/camera/camera_linux/lib/src/messages.g.dart b/packages/camera/camera_linux/lib/src/messages.g.dart index 6cf92e4af05a..f884bb068e41 100644 --- a/packages/camera/camera_linux/lib/src/messages.g.dart +++ b/packages/camera/camera_linux/lib/src/messages.g.dart @@ -63,6 +63,15 @@ enum PlatformImageFormatGroup { yuv420, } +enum PlatformResolutionPreset { + low, + medium, + high, + veryHigh, + ultraHigh, + max, +} + class PlatformSize { PlatformSize({ 
required this.width, @@ -187,14 +196,17 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is PlatformImageFormatGroup) { buffer.putUint8(134); writeValue(buffer, value.index); - } else if (value is PlatformSize) { + } else if (value is PlatformResolutionPreset) { buffer.putUint8(135); + writeValue(buffer, value.index); + } else if (value is PlatformSize) { + buffer.putUint8(136); writeValue(buffer, value.encode()); } else if (value is PlatformCameraState) { - buffer.putUint8(136); + buffer.putUint8(137); writeValue(buffer, value.encode()); } else if (value is PlatformPoint) { - buffer.putUint8(137); + buffer.putUint8(138); writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); @@ -223,10 +235,13 @@ class _PigeonCodec extends StandardMessageCodec { final int? value = readValue(buffer) as int?; return value == null ? null : PlatformImageFormatGroup.values[value]; case 135: - return PlatformSize.decode(readValue(buffer)!); + final int? value = readValue(buffer) as int?; + return value == null ? null : PlatformResolutionPreset.values[value]; case 136: - return PlatformCameraState.decode(readValue(buffer)!); + return PlatformSize.decode(readValue(buffer)!); case 137: + return PlatformCameraState.decode(readValue(buffer)!); + case 138: return PlatformPoint.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); @@ -276,7 +291,7 @@ class CameraApi { } /// Create a new camera with the given settings, and returns its ID. - Future create(String cameraName) async { + Future create(String cameraName, PlatformResolutionPreset resolutionPreset) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.create$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -284,7 +299,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? 
pigeonVar_replyList = - await pigeonVar_channel.send([cameraName]) as List?; + await pigeonVar_channel.send([cameraName, resolutionPreset]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index 8175a4e11adc..3e4ea240da32 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -50,6 +50,7 @@ list(APPEND PLUGIN_SOURCES "camera_plugin.cpp" "camera_host_plugin.cpp" "camera_texture_image_event_handler.cpp" + "camera.cpp" "messages.g.cc" ) diff --git a/packages/camera/camera_linux/linux/camera.cpp b/packages/camera/camera_linux/linux/camera.cpp new file mode 100644 index 000000000000..810480e135db --- /dev/null +++ b/packages/camera/camera_linux/linux/camera.cpp @@ -0,0 +1,117 @@ +#include "camera.h" + +#include "camera_texture_image_event_handler.h" + +Camera::Camera(Pylon::IPylonDevice* device, int64_t camera_id, + FlPluginRegistrar* registrar, + CameraLinuxPlatformResolutionPreset resolution_preset) + : camera_id(camera_id), + cameraLinuxCameraEventApi(camera_linux_camera_event_api_new( + fl_plugin_registrar_get_messenger(registrar), + std::to_string(camera_id).c_str())), + exposure_mode(CameraLinuxPlatformExposureMode:: + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO), + focus_mode(CameraLinuxPlatformFocusMode:: + CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED), + width(1920), + height(1080), + resolution_preset(resolution_preset), + registrar(registrar) { + camera = std::make_unique(device); + setResolutionPreset(resolution_preset); + if (registrar) g_object_ref(registrar); +} + +Camera::~Camera() { + if (camera) camera->Close(); + if (cameraLinuxCameraEventApi) g_object_unref(cameraLinuxCameraEventApi); + if (registrar) g_object_unref(registrar); +} + +void 
Camera::initialize(CameraLinuxPlatformImageFormatGroup image_format) { + cameraTextureImageEventHandler = + std::make_unique(*this, registrar); + camera->Open(); + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + Pylon::CEnumParameter(nodemap, "DeviceLinkThroughputLimitMode") + .TrySetValue("Off"); + Pylon::CBooleanParameter(nodemap, "AcquisitionFrameRateEnable") + .TrySetValue(true); + Pylon::CFloatParameter(nodemap, "AcquisitionFrameRate").TrySetValue(60.0); + Pylon::CFloatParameter(nodemap, "ResultingFrameRate").TrySetValue(60.0); + Pylon::CEnumParameter(nodemap, "PixelFormat").TrySetValue("RGB8"); + Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off"); + Pylon::CIntegerParameter(nodemap, "Width").TrySetValue(width); + Pylon::CIntegerParameter(nodemap, "Height").TrySetValue(height); + Pylon::CIntegerParameter(nodemap, "OffsetX").TrySetValue(0); + Pylon::CIntegerParameter(nodemap, "OffsetY").TrySetValue(0); + + camera->RegisterImageEventHandler(cameraTextureImageEventHandler.get(), + Pylon::RegistrationMode_Append, + Pylon::Cleanup_Delete); + camera->StartGrabbing(Pylon::GrabStrategy_LatestImages, + Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); + + emitState(); +} + +int64_t Camera::getTextureId() { + if (!cameraTextureImageEventHandler) return -1; + return cameraTextureImageEventHandler->get_texture_id(); +} + +void camera_linux_camera_event_api_initialized_callback(GObject* object, + GAsyncResult* result, + gpointer user_data) {} + +void Camera::emitState() { + if (!cameraLinuxCameraEventApi) return; + CameraLinuxPlatformSize* size = camera_linux_platform_size_new(width, height); + CameraLinuxPlatformCameraState* cameraState = + camera_linux_platform_camera_state_new(size, exposure_mode, focus_mode, + false, false); + camera_linux_camera_event_api_initialized( + cameraLinuxCameraEventApi, cameraState, nullptr, + camera_linux_camera_event_api_initialized_callback, nullptr); + g_object_unref(cameraState); + g_object_unref(size); +} + +Camera& 
Camera::setResolutionPreset( + CameraLinuxPlatformResolutionPreset preset) { + switch (preset) { + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW: + width = 352; + height = 288; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM: + width = 640; + height = 480; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH: + width = 1280; + height = 720; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH: + width = 1920; + height = 1080; + break; + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH: + case CameraLinuxPlatformResolutionPreset:: + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX: + width = 3840; + height = 2160; + break; + default: + width = 1920; + height = 1080; + break; + } + resolution_preset = preset; + return *this; +} diff --git a/packages/camera/camera_linux/linux/camera.h b/packages/camera/camera_linux/linux/camera.h new file mode 100644 index 000000000000..35aac476a85f --- /dev/null +++ b/packages/camera/camera_linux/linux/camera.h @@ -0,0 +1,53 @@ + +#ifndef CAMERA_H_ +#define CAMERA_H_ + +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Woverloaded-virtual" +#pragma clang diagnostic ignored "-Wunused-variable" + +#include + +#pragma clang diagnostic pop + +class Camera { + // Camera + public: + int64_t camera_id; + std::unique_ptr camera; + std::unique_ptr + cameraTextureImageEventHandler; + CameraLinuxCameraEventApi* cameraLinuxCameraEventApi; + + Camera(Pylon::IPylonDevice* device, int64_t camera_id, + FlPluginRegistrar* registrar, + CameraLinuxPlatformResolutionPreset resolution_preset); + + Camera(Camera&&) = default; + Camera& operator=(Camera&&) = default; + + ~Camera(); + + void initialize(CameraLinuxPlatformImageFormatGroup 
image_format); + int64_t getTextureId(); + + // State + public: + CameraLinuxPlatformExposureMode exposure_mode; + CameraLinuxPlatformFocusMode focus_mode; + int width; + int height; + + void emitState(); + + Camera& setResolutionPreset(CameraLinuxPlatformResolutionPreset preset); + + private: + CameraLinuxPlatformResolutionPreset resolution_preset; + FlPluginRegistrar* registrar; +}; + +#endif // CAMERA_H_ diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.cpp b/packages/camera/camera_linux/linux/camera_host_plugin.cpp index 6779a3c9d75b..12f670c2f4de 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.cpp +++ b/packages/camera/camera_linux/linux/camera_host_plugin.cpp @@ -1,11 +1,6 @@ #include "camera_host_plugin.h" -std::map - CameraHostPlugin::cameraTextureImageEventHandlers = {}; -std::map> - CameraHostPlugin::cameras = {}; -std::map - CameraHostPlugin::cameraLinuxCameraEventApis = {}; +std::vector CameraHostPlugin::cameras = {}; FlPluginRegistrar* CameraHostPlugin::registrar = nullptr; CameraHostPlugin::CameraHostPlugin(FlPluginRegistrar* registrar) @@ -53,20 +48,20 @@ CameraHostPlugin::CameraHostPlugin(FlPluginRegistrar* registrar) } CameraHostPlugin::~CameraHostPlugin() { - for (auto&& it = cameraLinuxCameraEventApis.begin(); - it != cameraLinuxCameraEventApis.end(); ++it) { - g_object_unref(it->second); - } - cameraLinuxCameraEventApis.clear(); - for (auto&& it = cameras.begin(); it != cameras.end(); ++it) { - it->second->Close(); - } cameras.clear(); - cameraTextureImageEventHandlers.clear(); g_object_unref(m_registrar); Pylon::PylonTerminate(); } +inline Camera& CameraHostPlugin::get_camera_by_id(int64_t camera_id) { + for (size_t i = 0; i < cameras.size(); ++i) { + if (cameras[i].camera_id == camera_id) { + return cameras[i]; + } + } + throw std::runtime_error("Camera not found"); +} + void CameraHostPlugin::get_available_cameras_names( CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { 
CAMERA_HOST_ERROR_HANDLING(get_available_cameras_names, { @@ -87,6 +82,7 @@ void CameraHostPlugin::get_available_cameras_names( void CameraHostPlugin::create( const gchar* camera_name, + CameraLinuxPlatformResolutionPreset resolution_preset, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { CAMERA_HOST_ERROR_HANDLING(create, { Pylon::CTlFactory& TlFactory = Pylon::CTlFactory::GetInstance(); @@ -97,21 +93,15 @@ void CameraHostPlugin::create( if (it->GetFriendlyName() == camera_name) { std::string serialNumber = it->GetSerialNumber().c_str(); int64_t camera_id = std::stoll(serialNumber); - if (cameras.find(camera_id) != cameras.end()) { - cameras[camera_id]->Close(); - cameras.erase(camera_id); - } - - cameras[camera_id] = std::make_unique( - TlFactory.CreateDevice(*it)); - - if (cameraLinuxCameraEventApis.find(camera_id) == - cameraLinuxCameraEventApis.end()) { - cameraLinuxCameraEventApis[camera_id] = - camera_linux_camera_event_api_new( - fl_plugin_registrar_get_messenger(registrar), - std::to_string(camera_id).c_str()); + for (auto&& camera_it = cameras.begin(); camera_it != cameras.end(); + ++camera_it) { + if (camera_it->camera_id == camera_id) { + cameras.erase(camera_it); + break; + } } + cameras.emplace_back(TlFactory.CreateDevice(*it), camera_id, registrar, + resolution_preset); CAMERA_HOST_RETURN(camera_id); return; @@ -129,56 +119,8 @@ void CameraHostPlugin::initialize( int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { CAMERA_HOST_ERROR_HANDLING(initialize, { - const auto camera_it = cameras.find(camera_id); - if (camera_it == cameras.end()) { - CAMERA_HOST_RAISE_ERROR("Camera not created"); - } - - CameraTextureImageEventHandler* cameraTextureImageEventHandler = - new CameraTextureImageEventHandler(registrar); - cameraTextureImageEventHandlers[camera_id] = cameraTextureImageEventHandler; - - Pylon::CInstantCamera* camera = 
camera_it->second.get(); - camera->Open(); - GenApi::INodeMap& nodemap = camera->GetNodeMap(); - Pylon::CEnumParameter(nodemap, "DeviceLinkThroughputLimitMode") - .TrySetValue("Off"); - Pylon::CBooleanParameter(nodemap, "AcquisitionFrameRateEnable") - .TrySetValue(true); - Pylon::CFloatParameter(nodemap, "AcquisitionFrameRate").TrySetValue(60.0); - Pylon::CFloatParameter(nodemap, "ResultingFrameRate").TrySetValue(60.0); - Pylon::CEnumParameter(nodemap, "PixelFormat").TrySetValue("RGB8"); - Pylon::CIntegerParameter(nodemap, "DecimationHorizontal").TrySetValue(2); - Pylon::CIntegerParameter(nodemap, "DecimationVertical").TrySetValue(2); - Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off"); - Pylon::CIntegerParameter(nodemap, "Width").TrySetValue(1920); - Pylon::CIntegerParameter(nodemap, "Height").TrySetValue(1080); - Pylon::CIntegerParameter(nodemap, "OffsetX").TrySetValue(0); - Pylon::CIntegerParameter(nodemap, "OffsetY").TrySetValue(0); - - camera->RegisterImageEventHandler(cameraTextureImageEventHandler, - Pylon::RegistrationMode_Append, - Pylon::Cleanup_Delete); - camera->StartGrabbing(Pylon::GrabStrategy_LatestImages, - Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); - - std::cout << "Texture ID: " - << cameraTextureImageEventHandler->get_texture_id() << std::endl; - - CameraLinuxPlatformSize* size = camera_linux_platform_size_new(1920, 1080); - CameraLinuxPlatformCameraState* cameraState = - camera_linux_platform_camera_state_new( - size, - CameraLinuxPlatformExposureMode:: - CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED, - CameraLinuxPlatformFocusMode:: - CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED, - true, true); - camera_linux_camera_event_api_initialized( - cameraLinuxCameraEventApis[camera_id], cameraState, nullptr, - camera_linux_camera_event_api_initialized_callback, nullptr); - g_object_unref(cameraState); - g_object_unref(size); + Camera& camera = get_camera_by_id(camera_id); + camera.initialize(image_format); CAMERA_HOST_VOID_RETURN(); }); } 
@@ -187,18 +129,8 @@ void CameraHostPlugin::get_texture_id( int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { CAMERA_HOST_ERROR_HANDLING(get_texture_id, { - const auto cameraTextureImageEventHandler_it = - cameraTextureImageEventHandlers.find(camera_id); - if (cameraTextureImageEventHandler_it == - cameraTextureImageEventHandlers.end()) { - CAMERA_HOST_RAISE_ERROR("Camera not created"); - } - CameraTextureImageEventHandler* cameraTextureImageEventHandler = - cameraTextureImageEventHandler_it->second; - if (cameraTextureImageEventHandler == nullptr) { - CAMERA_HOST_RAISE_ERROR("Camera not initialized"); - } - int64_t texture_id = cameraTextureImageEventHandler->get_texture_id(); + Camera& camera = get_camera_by_id(camera_id); + int64_t texture_id = camera.getTextureId(); if (texture_id == -1) { CAMERA_HOST_RAISE_ERROR("Texture not created"); } diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.h b/packages/camera/camera_linux/linux/camera_host_plugin.h index 6dfd88277798..a2034071d782 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.h +++ b/packages/camera/camera_linux/linux/camera_host_plugin.h @@ -14,6 +14,8 @@ #include +#include "camera.h" + #pragma clang diagnostic pop #define CAMERA_HOST_ERROR_HANDLING(method_name, code) \ @@ -47,21 +49,20 @@ class CameraHostPlugin { static FlPluginRegistrar* registrar; FlPluginRegistrar* m_registrar; - static std::map> cameras; - static std::map - cameraTextureImageEventHandlers; - static std::map - cameraLinuxCameraEventApis; + static std::vector cameras; public: CameraHostPlugin(FlPluginRegistrar* registrar); ~CameraHostPlugin(); + inline static Camera& get_camera_by_id(int64_t camera_id); + static void get_available_cameras_names( CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); static void create(const gchar* camera_name, + CameraLinuxPlatformResolutionPreset resolution_preset, CameraLinuxCameraApiResponseHandle* 
response_handle, gpointer user_data); diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp index 6a68a0b937d4..39dbf3579d34 100644 --- a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp +++ b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp @@ -33,8 +33,9 @@ static void fl_my_texture_gl_class_init(FlMyTextureGLClass* klass) { static void fl_my_texture_gl_init(FlMyTextureGL* self) {} CameraTextureImageEventHandler::CameraTextureImageEventHandler( - FlPluginRegistrar* registrar) - : m_registrar(registrar), + const Camera& camera, FlPluginRegistrar* registrar) + : camera(camera), + m_registrar(registrar), m_texture_registrar( fl_plugin_registrar_get_texture_registrar(registrar)) {} @@ -56,27 +57,23 @@ int64_t CameraTextureImageEventHandler::get_texture_id() { } void CameraTextureImageEventHandler::OnImageEventHandlerRegistered( - Pylon::CInstantCamera& camera) { + Pylon::CInstantCamera& _) { FlView* fl_view = FL_VIEW(fl_plugin_registrar_get_view(m_registrar)); GdkWindow* window = gtk_widget_get_parent_window(GTK_WIDGET(fl_view)); m_gl_context = gdk_window_create_gl_context(window, NULL); - // Camera frame size, update if you get dynamic size - int width = 1920; - int height = 1080; - // Create GL texture for the camera preview gdk_gl_context_make_current(m_gl_context); glGenTextures(1, &m_texture_name); glBindTexture(GL_TEXTURE_2D, m_texture_name); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, - GL_UNSIGNED_BYTE, nullptr); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, camera.width, camera.height, 0, + GL_RGB, GL_UNSIGNED_BYTE, nullptr); // Wrap GL texture for Flutter - m_texture = - fl_my_texture_gl_new(GL_TEXTURE_2D, m_texture_name, width, height); + m_texture = 
fl_my_texture_gl_new(GL_TEXTURE_2D, m_texture_name, camera.width, + camera.height); fl_texture_registrar_register_texture(m_texture_registrar, FL_TEXTURE(m_texture)); fl_texture_registrar_mark_texture_frame_available(m_texture_registrar, @@ -84,13 +81,15 @@ void CameraTextureImageEventHandler::OnImageEventHandlerRegistered( } void CameraTextureImageEventHandler::OnImageGrabbed( - Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& ptr) { + Pylon::CInstantCamera& _, const Pylon::CGrabResultPtr& ptr) { if (!m_texture) { return; } - static std::chrono::steady_clock::time_point m_last_fps_time; - static int m_frame_count = 0; + if (!ptr->GrabSucceeded()) { + std::cerr << "Error grabbing image" << std::endl; + return; + } gdk_gl_context_make_current(m_gl_context); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, ptr->GetWidth(), ptr->GetHeight(), @@ -98,22 +97,4 @@ void CameraTextureImageEventHandler::OnImageGrabbed( glFlush(); fl_texture_registrar_mark_texture_frame_available(m_texture_registrar, FL_TEXTURE(m_texture)); - - // Track frame count - m_frame_count++; - auto now = std::chrono::steady_clock::now(); - auto elapsed = std::chrono::duration_cast( - now - m_last_fps_time) - .count(); - - if (elapsed >= 1000) { // 1 second has passed - std::cout << "FPS: " << m_frame_count << std::endl; - m_frame_count = 0; - m_last_fps_time = now; - } - - if (!ptr->GrabSucceeded()) { - std::cerr << "Error grabbing image" << std::endl; - return; - } } diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h index 20800f519ee2..3c6f922ab78f 100644 --- a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h +++ b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h @@ -4,6 +4,7 @@ #include +#include "camera.h" #include "flutter_linux/flutter_linux.h" #include "messages.g.h" @@ -32,12 +33,14 @@ FlMyTextureGL* fl_my_texture_gl_new(uint32_t target, 
uint32_t name, class CameraTextureImageEventHandler : public Pylon::CImageEventHandler { FlMyTextureGL* m_texture; unsigned int m_texture_name; + const Camera& camera; FlPluginRegistrar* m_registrar; FlTextureRegistrar* m_texture_registrar; GdkGLContext* m_gl_context; public: - CameraTextureImageEventHandler(FlPluginRegistrar* registrar); + CameraTextureImageEventHandler(const Camera& camera, + FlPluginRegistrar* registrar); ~CameraTextureImageEventHandler() override; diff --git a/packages/camera/camera_linux/linux/messages.g.cc b/packages/camera/camera_linux/linux/messages.g.cc index f6250ab91f6a..11f8816768b5 100644 --- a/packages/camera/camera_linux/linux/messages.g.cc +++ b/packages/camera/camera_linux/linux/messages.g.cc @@ -120,7 +120,7 @@ gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinu static FlValue* camera_linux_platform_camera_state_to_list(CameraLinuxPlatformCameraState* self) { FlValue* values = fl_value_new_list(); - fl_value_append_take(values, fl_value_new_custom_object(135, G_OBJECT(self->preview_size))); + fl_value_append_take(values, fl_value_new_custom_object(136, G_OBJECT(self->preview_size))); fl_value_append_take(values, fl_value_new_custom(130, fl_value_new_int(self->exposure_mode), (GDestroyNotify)fl_value_unref)); fl_value_append_take(values, fl_value_new_custom(132, fl_value_new_int(self->focus_mode), (GDestroyNotify)fl_value_unref)); fl_value_append_take(values, fl_value_new_bool(self->exposure_point_supported)); @@ -237,22 +237,28 @@ static gboolean camera_linux_message_codec_write_camera_linux_platform_image_for return fl_standard_message_codec_write_value(codec, buffer, value, error); } -static gboolean camera_linux_message_codec_write_camera_linux_platform_size(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformSize* value, GError** error) { +static gboolean camera_linux_message_codec_write_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GByteArray* buffer, 
FlValue* value, GError** error) { uint8_t type = 135; g_byte_array_append(buffer, &type, sizeof(uint8_t)); + return fl_standard_message_codec_write_value(codec, buffer, value, error); +} + +static gboolean camera_linux_message_codec_write_camera_linux_platform_size(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformSize* value, GError** error) { + uint8_t type = 136; + g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_size_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformCameraState* value, GError** error) { - uint8_t type = 136; + uint8_t type = 137; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_camera_state_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_point(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformPoint* value, GError** error) { - uint8_t type = 137; + uint8_t type = 138; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_point_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); @@ -274,10 +280,12 @@ static gboolean camera_linux_message_codec_write_value(FlStandardMessageCodec* c case 134: return camera_linux_message_codec_write_camera_linux_platform_image_format_group(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); case 135: - return camera_linux_message_codec_write_camera_linux_platform_size(codec, buffer, CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value)), error); + return 
camera_linux_message_codec_write_camera_linux_platform_resolution_preset(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); case 136: - return camera_linux_message_codec_write_camera_linux_platform_camera_state(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_STATE(fl_value_get_custom_value_object(value)), error); + return camera_linux_message_codec_write_camera_linux_platform_size(codec, buffer, CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value)), error); case 137: + return camera_linux_message_codec_write_camera_linux_platform_camera_state(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_STATE(fl_value_get_custom_value_object(value)), error); + case 138: return camera_linux_message_codec_write_camera_linux_platform_point(codec, buffer, CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value)), error); } } @@ -309,6 +317,10 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_form return fl_value_new_custom(134, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } +static FlValue* camera_linux_message_codec_read_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { + return fl_value_new_custom(135, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); +} + static FlValue* camera_linux_message_codec_read_camera_linux_platform_size(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { g_autoptr(FlValue) values = fl_standard_message_codec_read_value(codec, buffer, offset, error); if (values == nullptr) { @@ -321,7 +333,7 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_size(FlSta return nullptr; } - return fl_value_new_custom_object(135, G_OBJECT(value)); + return fl_value_new_custom_object(136, G_OBJECT(value)); } static FlValue* 
camera_linux_message_codec_read_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -336,7 +348,7 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_sta return nullptr; } - return fl_value_new_custom_object(136, G_OBJECT(value)); + return fl_value_new_custom_object(137, G_OBJECT(value)); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_point(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -351,7 +363,7 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_point(FlSt return nullptr; } - return fl_value_new_custom_object(137, G_OBJECT(value)); + return fl_value_new_custom_object(138, G_OBJECT(value)); } static FlValue* camera_linux_message_codec_read_value_of_type(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, int type, GError** error) { @@ -369,10 +381,12 @@ static FlValue* camera_linux_message_codec_read_value_of_type(FlStandardMessageC case 134: return camera_linux_message_codec_read_camera_linux_platform_image_format_group(codec, buffer, offset, error); case 135: - return camera_linux_message_codec_read_camera_linux_platform_size(codec, buffer, offset, error); + return camera_linux_message_codec_read_camera_linux_platform_resolution_preset(codec, buffer, offset, error); case 136: - return camera_linux_message_codec_read_camera_linux_platform_camera_state(codec, buffer, offset, error); + return camera_linux_message_codec_read_camera_linux_platform_size(codec, buffer, offset, error); case 137: + return camera_linux_message_codec_read_camera_linux_platform_camera_state(codec, buffer, offset, error); + case 138: return camera_linux_message_codec_read_camera_linux_platform_point(codec, buffer, offset, error); default: return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->read_value_of_type(codec, buffer, offset, type, error); @@ -1724,8 +1738,10 @@ static 
void camera_linux_camera_api_create_cb(FlBasicMessageChannel* channel, Fl FlValue* value0 = fl_value_get_list_value(message_, 0); const gchar* camera_name = fl_value_get_string(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformResolutionPreset resolution_preset = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->create(camera_name, handle, self->user_data); + self->vtable->create(camera_name, resolution_preset, handle, self->user_data); } static void camera_linux_camera_api_initialize_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { @@ -2896,7 +2912,7 @@ static void camera_linux_camera_event_api_initialized_cb(GObject* object, GAsync void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* self, CameraLinuxPlatformCameraState* initial_state, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { g_autoptr(FlValue) args = fl_value_new_list(); - fl_value_append_take(args, fl_value_new_custom_object(136, G_OBJECT(initial_state))); + fl_value_append_take(args, fl_value_new_custom_object(137, G_OBJECT(initial_state))); g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.initialized%s", self->suffix); g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); diff --git a/packages/camera/camera_linux/linux/messages.g.h b/packages/camera/camera_linux/linux/messages.g.h index 1bc7c5a9c0bd..a3cbb7cd9c3f 100644 --- a/packages/camera/camera_linux/linux/messages.g.h +++ b/packages/camera/camera_linux/linux/messages.g.h @@ -86,6 +86,25 @@ typedef enum { 
CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_YUV420 = 1 } CameraLinuxPlatformImageFormatGroup; +/** + * CameraLinuxPlatformResolutionPreset: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH: + * CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX: + * + */ +typedef enum { + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_LOW = 0, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MEDIUM = 1, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_HIGH = 2, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_VERY_HIGH = 3, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_ULTRA_HIGH = 4, + CAMERA_LINUX_PLATFORM_RESOLUTION_PRESET_MAX = 5 +} CameraLinuxPlatformResolutionPreset; + /** * CameraLinuxPlatformSize: * @@ -246,7 +265,7 @@ G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResponseHandle, camera_linux_camera_api */ typedef struct { void (*get_available_cameras_names)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*create)(const gchar* camera_name, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*create)(const gchar* camera_name, CameraLinuxPlatformResolutionPreset resolution_preset, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*initialize)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*start_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*stop_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); diff --git a/packages/camera/camera_linux/pigeons/messages.dart b/packages/camera/camera_linux/pigeons/messages.dart index d84d05b4ccb5..706bfe692530 100644 --- a/packages/camera/camera_linux/pigeons/messages.dart +++ 
b/packages/camera/camera_linux/pigeons/messages.dart @@ -60,6 +60,15 @@ enum PlatformImageFormatGroup { yuv420, } +enum PlatformResolutionPreset { + low, // 352x288 on iOS, ~240p on Android and Web + medium, // ~480p + high, // ~720p + veryHigh, // ~1080p + ultraHigh, // ~2160p + max, // The highest resolution available. +} + // Pigeon version of the data needed for a CameraInitializedEvent. class PlatformCameraState { PlatformCameraState({ @@ -102,7 +111,7 @@ abstract class CameraApi { /// Create a new camera with the given settings, and returns its ID. @async - int create(String cameraName); + int create(String cameraName, PlatformResolutionPreset resolutionPreset); /// Initializes the camera with the given ID. @async From 389d80ca27fe21945835a1816429ce0ca97af45a Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Fri, 6 Jun 2025 08:47:01 -0400 Subject: [PATCH 11/21] cam mono and rgb --- packages/camera/camera/example/lib/main.dart | 36 +++- .../camera/camera_linux/lib/camera_linux.dart | 1 + .../camera_linux/lib/src/linux_camera.dart | 112 ++++++++---- .../camera_linux/lib/src/messages.g.dart | 53 +++++- packages/camera/camera_linux/linux/camera.cpp | 68 ++++++- packages/camera/camera_linux/linux/camera.h | 8 +- .../camera_linux/linux/camera_host_plugin.cpp | 11 ++ .../camera_linux/linux/camera_host_plugin.h | 4 + .../camera_texture_image_event_handler.cpp | 169 ++++++++++++++++-- .../camera_texture_image_event_handler.h | 8 + .../camera/camera_linux/linux/messages.g.cc | 155 ++++++++++++++++ .../camera/camera_linux/linux/messages.g.h | 93 +++++++++- .../camera/camera_linux/pigeons/messages.dart | 11 +- 13 files changed, 664 insertions(+), 65 deletions(-) diff --git a/packages/camera/camera/example/lib/main.dart b/packages/camera/camera/example/lib/main.dart index cca528c04fda..4094a551ab52 100644 --- a/packages/camera/camera/example/lib/main.dart +++ b/packages/camera/camera/example/lib/main.dart @@ -6,6 +6,8 @@ import 'dart:async'; import 'dart:io'; import 
'package:camera/camera.dart'; +import 'package:camera_linux/camera_linux.dart'; +import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter/scheduler.dart'; @@ -155,6 +157,38 @@ class _CameraExampleHomeState extends State ), _captureControlRowWidget(), _modeControlRowWidget(), + Row( + children: [ + ElevatedButton( + onPressed: () { + setState(() { + if (TargetPlatform.linux != defaultTargetPlatform) { + return; + } + final CameraLinux nativeCamera = + CameraPlatform.instance as CameraLinux; + nativeCamera.setImageFormatGroup( + controller!.cameraId, PlatformImageFormatGroup.mono8); + }); + }, + child: Text('mono8'), + ), + ElevatedButton( + onPressed: () { + setState(() { + if (TargetPlatform.linux != defaultTargetPlatform) { + return; + } + final CameraLinux nativeCamera = + CameraPlatform.instance as CameraLinux; + nativeCamera.setImageFormatGroup( + controller!.cameraId, PlatformImageFormatGroup.rgb8); + }); + }, + child: Text('rgb8'), + ), + ], + ), Padding( padding: const EdgeInsets.all(5.0), child: Row( @@ -635,7 +669,7 @@ class _CameraExampleHomeState extends State ) async { final cameraController = CameraController( cameraDescription, - kIsWeb ? 
ResolutionPreset.max : ResolutionPreset.medium, + ResolutionPreset.max, enableAudio: enableAudio, imageFormatGroup: ImageFormatGroup.jpeg, ); diff --git a/packages/camera/camera_linux/lib/camera_linux.dart b/packages/camera/camera_linux/lib/camera_linux.dart index bad23a19fa41..4a8d4b963e4b 100644 --- a/packages/camera/camera_linux/lib/camera_linux.dart +++ b/packages/camera/camera_linux/lib/camera_linux.dart @@ -1 +1,2 @@ export 'src/linux_camera.dart'; +export 'src/messages.g.dart'; diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart index 33c782387b49..37104c6e704b 100644 --- a/packages/camera/camera_linux/lib/src/linux_camera.dart +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -112,8 +112,22 @@ class CameraLinux extends CameraPlatform { ), ); + PlatformImageFormatGroup imageFormat = PlatformImageFormatGroup.rgb8; + switch (imageFormatGroup) { + case ImageFormatGroup.jpeg: + imageFormat = PlatformImageFormatGroup.rgb8; + break; + case ImageFormatGroup.unknown: + case ImageFormatGroup.yuv420: + case ImageFormatGroup.nv21: + case ImageFormatGroup.bgra8888: + default: + imageFormat = PlatformImageFormatGroup.mono8; + break; + } + try { - await _hostApi.initialize(cameraId, _pigeonImageFormat(imageFormatGroup)); + await _hostApi.initialize(cameraId, imageFormat); } on PlatformException catch (e, s) { completer.completeError( CameraException(e.code, e.message), @@ -264,20 +278,28 @@ class CameraLinux extends CameraPlatform { @override Widget buildPreview(int cameraId) { - return FutureBuilder( - future: _hostApi.getTextureId(cameraId), + unawaited( + _hostApi.getTextureId(cameraId).then( + (int? 
textureId) { + cameraEventStreamController.add(TextureIdEvent(cameraId, textureId)); + }, + ), + ); + return StreamBuilder( + stream: _cameraEvents(cameraId) + .whereType() + .map((event) => event.textureId), builder: (context, snapshot) { - if (snapshot.connectionState == ConnectionState.done && - snapshot.data != null) { - return RepaintBoundary( - child: Texture( - textureId: snapshot.data!, - filterQuality: FilterQuality.none, - ), - ); - } else { + if (snapshot.data == null || snapshot.data == -1) { return const Center(child: CircularProgressIndicator()); } + + return RepaintBoundary( + child: Texture( + textureId: snapshot.data!, + filterQuality: FilterQuality.none, + ), + ); }, ); } @@ -288,31 +310,50 @@ class CameraLinux extends CameraPlatform { @override Future setImageFileFormat(int cameraId, ImageFileFormat format) async {} - /// Returns an [ImageFormatGroup]'s Pigeon representation. - PlatformImageFormatGroup _pigeonImageFormat(ImageFormatGroup format) { - switch (format) { - // "unknown" is used to indicate the default. - case ImageFormatGroup.unknown: - case ImageFormatGroup.bgra8888: - return PlatformImageFormatGroup.bgra8888; - case ImageFormatGroup.yuv420: - return PlatformImageFormatGroup.yuv420; - case ImageFormatGroup.jpeg: - case ImageFormatGroup.nv21: - // Fall through. + Future setImageFormatGroup( + int cameraId, PlatformImageFormatGroup format) async { + try { + await _hostApi.setImageFormatGroup(cameraId, format); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); } - // The enum comes from a different package, which could get a new value at - // any time, so provide a fallback that ensures this won't break when used - // with a version that contains new values. This is deliberately outside - // the switch rather than a `default` so that the linter will flag the - // switch as needing an update. 
- // TODO(stuartmorgan): Consider throwing an UnsupportedError, instead of - // doing fallback, when a specific unsupported format is requested. This - // would require a breaking change at this layer and the app-facing layer. - return PlatformImageFormatGroup.bgra8888; } } +/// An event fired when the camera texture id changed. +class TextureIdEvent extends CameraEvent { + const TextureIdEvent( + super.cameraId, + this.textureId, + ); + + TextureIdEvent.fromJson(Map json) + : textureId = json['textureId']! as int?, + super(json['cameraId']! as int); + + /// The texture ID of the camera. + final int? textureId; + + Map toJson() => { + 'cameraId': cameraId, + if (textureId != null) 'textureId': textureId!, + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + super == other && + other is TextureIdEvent && + runtimeType == other.runtimeType && + textureId == other.textureId; + + @override + int get hashCode => Object.hash( + super.hashCode, + textureId, + ); +} + /// Callback handler for camera-level events from the platform host. @visibleForTesting class HostCameraMessageHandler implements CameraEventApi { @@ -353,6 +394,11 @@ class HostCameraMessageHandler implements CameraEventApi { ), ); } + + @override + void textureId(int textureId) { + streamController.add(TextureIdEvent(cameraId, textureId)); + } } /// Converts a Pigeon [PlatformExposureMode] to an [ExposureMode]. 
diff --git a/packages/camera/camera_linux/lib/src/messages.g.dart b/packages/camera/camera_linux/lib/src/messages.g.dart index f884bb068e41..82c333f56b3a 100644 --- a/packages/camera/camera_linux/lib/src/messages.g.dart +++ b/packages/camera/camera_linux/lib/src/messages.g.dart @@ -59,8 +59,8 @@ enum PlatformImageFileFormat { } enum PlatformImageFormatGroup { - bgra8888, - yuv420, + rgb8, + mono8, } enum PlatformResolutionPreset { @@ -1053,6 +1053,28 @@ class CameraApi { return; } } + + Future setImageFormatGroup(int cameraId, PlatformImageFormatGroup imageFormatGroup) async { + final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final List? pigeonVar_replyList = + await pigeonVar_channel.send([cameraId, imageFormatGroup]) as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } } /// Handler for native callbacks that are tied to a specific camera ID. @@ -1064,6 +1086,8 @@ abstract class CameraEventApi { /// Called when the camera is inialitized for use. void initialized(PlatformCameraState initialState); + void textureId(int textureId); + /// Called when an error occurs in the camera. 
/// /// This should be used for errors that occur outside of the context of @@ -1097,6 +1121,31 @@ abstract class CameraEventApi { }); } } + { + final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + 'dev.flutter.pigeon.camera_linux.CameraEventApi.textureId$messageChannelSuffix', pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + pigeonVar_channel.setMessageHandler(null); + } else { + pigeonVar_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.textureId was null.'); + final List args = (message as List?)!; + final int? arg_textureId = (args[0] as int?); + assert(arg_textureId != null, + 'Argument for dev.flutter.pigeon.camera_linux.CameraEventApi.textureId was null, expected non-null int.'); + try { + api.textureId(arg_textureId!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse(error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } { final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( 'dev.flutter.pigeon.camera_linux.CameraEventApi.error$messageChannelSuffix', pigeonChannelCodec, diff --git a/packages/camera/camera_linux/linux/camera.cpp b/packages/camera/camera_linux/linux/camera.cpp index 810480e135db..234f7cc52b97 100644 --- a/packages/camera/camera_linux/linux/camera.cpp +++ b/packages/camera/camera_linux/linux/camera.cpp @@ -13,8 +13,10 @@ Camera::Camera(Pylon::IPylonDevice* device, int64_t camera_id, CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO), focus_mode(CameraLinuxPlatformFocusMode:: CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED), - width(1920), - height(1080), + width(3840), + height(2160), + imageFormatGroup(CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8), resolution_preset(resolution_preset), registrar(registrar) { camera = std::make_unique(device); @@ 
-23,12 +25,18 @@ Camera::Camera(Pylon::IPylonDevice* device, int64_t camera_id, } Camera::~Camera() { - if (camera) camera->Close(); + if (cameraTextureImageEventHandler && camera) + camera->DeregisterImageEventHandler(cameraTextureImageEventHandler.get()); + if (camera) { + if (camera->IsGrabbing()) camera->StopGrabbing(); + if (camera->IsOpen()) camera->Close(); + } if (cameraLinuxCameraEventApi) g_object_unref(cameraLinuxCameraEventApi); if (registrar) g_object_unref(registrar); } -void Camera::initialize(CameraLinuxPlatformImageFormatGroup image_format) { +void Camera::initialize(CameraLinuxPlatformImageFormatGroup imageFormat) { + imageFormatGroup = imageFormat; cameraTextureImageEventHandler = std::make_unique(*this, registrar); camera->Open(); @@ -39,22 +47,62 @@ void Camera::initialize(CameraLinuxPlatformImageFormatGroup image_format) { .TrySetValue(true); Pylon::CFloatParameter(nodemap, "AcquisitionFrameRate").TrySetValue(60.0); Pylon::CFloatParameter(nodemap, "ResultingFrameRate").TrySetValue(60.0); - Pylon::CEnumParameter(nodemap, "PixelFormat").TrySetValue("RGB8"); + setImageFormatGroup(imageFormat); Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off"); Pylon::CIntegerParameter(nodemap, "Width").TrySetValue(width); Pylon::CIntegerParameter(nodemap, "Height").TrySetValue(height); Pylon::CIntegerParameter(nodemap, "OffsetX").TrySetValue(0); Pylon::CIntegerParameter(nodemap, "OffsetY").TrySetValue(0); + Pylon::CStringParameter(nodemap, "ExposureAuto").TrySetValue("Continuous"); + Pylon::CBooleanParameter(nodemap, "ReverseY").TrySetValue(true); + Pylon::CBooleanParameter(nodemap, "AutoFunctionROIUseBrightness") + .TrySetValue(true); + Pylon::CBooleanParameter(nodemap, "AutoFunctionROIUseWhiteBalance") + .TrySetValue(true); + Pylon::CEnumParameter(nodemap, "BslDefectPixelCorrectionMode") + .TrySetValue("On"); camera->RegisterImageEventHandler(cameraTextureImageEventHandler.get(), Pylon::RegistrationMode_Append, - Pylon::Cleanup_Delete); + 
Pylon::Cleanup_None); camera->StartGrabbing(Pylon::GrabStrategy_LatestImages, Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); emitState(); } +void Camera::setImageFormatGroup( + CameraLinuxPlatformImageFormatGroup imageFormatGroup) { + if (!camera) return; + bool wasGrabbing = camera->IsGrabbing(); + if (wasGrabbing) { + camera->StopGrabbing(); + camera->DeregisterImageEventHandler(cameraTextureImageEventHandler.get()); + cameraTextureImageEventHandler.reset(); + } + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + switch (imageFormatGroup) { + case CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8: + Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("Mono8"); + break; + case CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8: + default: + Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("RGB8"); + break; + } + if (wasGrabbing) { + cameraTextureImageEventHandler = + std::make_unique(*this, registrar); + camera->RegisterImageEventHandler(cameraTextureImageEventHandler.get(), + Pylon::RegistrationMode_Append, + Pylon::Cleanup_None); + camera->StartGrabbing(Pylon::GrabStrategy_LatestImages, + Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); + } +} + int64_t Camera::getTextureId() { if (!cameraTextureImageEventHandler) return -1; return cameraTextureImageEventHandler->get_texture_id(); @@ -77,6 +125,14 @@ void Camera::emitState() { g_object_unref(size); } +void Camera::emitTextureId(int64_t textureId) const { + if (!cameraLinuxCameraEventApi) return; + + camera_linux_camera_event_api_texture_id( + cameraLinuxCameraEventApi, textureId, nullptr, + camera_linux_camera_event_api_initialized_callback, nullptr); +} + Camera& Camera::setResolutionPreset( CameraLinuxPlatformResolutionPreset preset) { switch (preset) { diff --git a/packages/camera/camera_linux/linux/camera.h b/packages/camera/camera_linux/linux/camera.h index 35aac476a85f..e25e2e7ed586 100644 --- 
a/packages/camera/camera_linux/linux/camera.h +++ b/packages/camera/camera_linux/linux/camera.h @@ -31,17 +31,23 @@ class Camera { ~Camera(); - void initialize(CameraLinuxPlatformImageFormatGroup image_format); + void initialize(CameraLinuxPlatformImageFormatGroup imageFormat); + int64_t getTextureId(); + void setImageFormatGroup( + CameraLinuxPlatformImageFormatGroup imageFormatGroup); + // State public: CameraLinuxPlatformExposureMode exposure_mode; CameraLinuxPlatformFocusMode focus_mode; int width; int height; + CameraLinuxPlatformImageFormatGroup imageFormatGroup; void emitState(); + void emitTextureId(int64_t textureId) const; Camera& setResolutionPreset(CameraLinuxPlatformResolutionPreset preset); diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.cpp b/packages/camera/camera_linux/linux/camera_host_plugin.cpp index 12f670c2f4de..df893be75f15 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.cpp +++ b/packages/camera/camera_linux/linux/camera_host_plugin.cpp @@ -39,6 +39,7 @@ CameraHostPlugin::CameraHostPlugin(FlPluginRegistrar* registrar) .resume_preview = resume_preview, .update_description_while_recording = update_description_while_recording, .set_image_file_format = set_image_file_format, + .set_image_format_group = set_image_format_group, }; camera_linux_camera_api_set_method_handlers( @@ -115,6 +116,16 @@ void CameraHostPlugin::create( void CameraHostPlugin::camera_linux_camera_event_api_initialized_callback( GObject* object, GAsyncResult* result, gpointer user_data) {} +void CameraHostPlugin::set_image_format_group( + int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format_group, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(set_image_format_group, { + Camera& camera = get_camera_by_id(camera_id); + camera.setImageFormatGroup(image_format_group); + CAMERA_HOST_VOID_RETURN(); + }); +} + void CameraHostPlugin::initialize( int64_t camera_id, 
CameraLinuxPlatformImageFormatGroup image_format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.h b/packages/camera/camera_linux/linux/camera_host_plugin.h index a2034071d782..a8a19fbae493 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.h +++ b/packages/camera/camera_linux/linux/camera_host_plugin.h @@ -230,6 +230,10 @@ class CameraHostPlugin { static void camera_linux_camera_event_api_initialized_callback( GObject* object, GAsyncResult* result, gpointer user_data); + + static void set_image_format_group( + int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format_group, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); }; #endif // CAMERA_HOST_PLUGIN_PRIVATE_H_ diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp index 39dbf3579d34..35dd5000a70e 100644 --- a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp +++ b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp @@ -1,5 +1,7 @@ #include "camera_texture_image_event_handler.h" +#include + #include G_DEFINE_TYPE(FlMyTextureGL, fl_my_texture_gl, fl_texture_gl_get_type()) @@ -56,45 +58,180 @@ int64_t CameraTextureImageEventHandler::get_texture_id() { return fl_texture_get_id(FL_TEXTURE(m_texture)); } +GLuint compileShader(GLenum type, const char* src) { + GLuint shader = glCreateShader(type); + glShaderSource(shader, 1, &src, nullptr); + glCompileShader(shader); + GLint success; + glGetShaderiv(shader, GL_COMPILE_STATUS, &success); + if (!success) { + char log[512]; + glGetShaderInfoLog(shader, 512, nullptr, log); + std::cerr << "Shader compile error: " << log << std::endl; + } + return shader; +} + +GLuint createShaderProgram() { + const char* vertexSrc = R"( + #version 300 es + precision mediump float; + layout 
(location = 0) in vec2 position; + layout (location = 1) in vec2 texCoord; + out vec2 TexCoords; + void main() { + TexCoords = texCoord; + gl_Position = vec4(position, 0.0, 1.0); + } + )"; + + const char* fragmentSrc = R"( + #version 300 es + precision mediump float; + in vec2 TexCoords; + out vec4 FragColor; + uniform sampler2D monoTexture; + void main() { + float gray = texture(monoTexture, TexCoords).r; + FragColor = vec4(gray, gray, gray, 1.0); // convert mono to RGB + } + )"; + + GLuint vs = compileShader(GL_VERTEX_SHADER, vertexSrc); + GLuint fs = compileShader(GL_FRAGMENT_SHADER, fragmentSrc); + + GLuint program = glCreateProgram(); + glAttachShader(program, vs); + glAttachShader(program, fs); + glLinkProgram(program); + + GLint success; + glGetProgramiv(program, GL_LINK_STATUS, &success); + if (!success) { + char log[512]; + glGetProgramInfoLog(program, 512, nullptr, log); + std::cerr << "Shader program link error: " << log << std::endl; + } + + glDeleteShader(vs); + glDeleteShader(fs); + + return program; +} + void CameraTextureImageEventHandler::OnImageEventHandlerRegistered( Pylon::CInstantCamera& _) { FlView* fl_view = FL_VIEW(fl_plugin_registrar_get_view(m_registrar)); GdkWindow* window = gtk_widget_get_parent_window(GTK_WIDGET(fl_view)); m_gl_context = gdk_window_create_gl_context(window, NULL); - - // Create GL texture for the camera preview gdk_gl_context_make_current(m_gl_context); - glGenTextures(1, &m_texture_name); - glBindTexture(GL_TEXTURE_2D, m_texture_name); + + const int width = camera.width; + const int height = camera.height; + + // 1. Create input texture (raw camera frame) + glGenTextures(1, &m_input_texture); + glBindTexture(GL_TEXTURE_2D, m_input_texture); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, + GL_UNSIGNED_BYTE, nullptr); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + + // 2. 
Create output texture (post-shader result) + glGenTextures(1, &m_output_texture); + glBindTexture(GL_TEXTURE_2D, m_output_texture); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, + GL_UNSIGNED_BYTE, nullptr); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, camera.width, camera.height, 0, - GL_RGB, GL_UNSIGNED_BYTE, nullptr); - // Wrap GL texture for Flutter - m_texture = fl_my_texture_gl_new(GL_TEXTURE_2D, m_texture_name, camera.width, - camera.height); + // 3. Create framebuffer and attach output texture + glGenFramebuffers(1, &m_fbo); + glBindFramebuffer(GL_FRAMEBUFFER, m_fbo); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, + m_output_texture, 0); + + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + std::cerr << "Framebuffer not complete!" << std::endl; + } + + // 4. Create shader program + m_shader_program = createShaderProgram(); + + // 5. Create fullscreen quad VAO/VBO + float quadVertices[] = { + // pos // tex + -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, -1.0f, 1.0f, 0.0f, + -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, + }; + + glGenVertexArrays(1, &m_vao); + glGenBuffers(1, &m_vbo); + glBindVertexArray(m_vao); + glBindBuffer(GL_ARRAY_BUFFER, m_vbo); + glBufferData(GL_ARRAY_BUFFER, sizeof(quadVertices), quadVertices, + GL_STATIC_DRAW); + glEnableVertexAttribArray(0); + glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0); + glEnableVertexAttribArray(1); + glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), + (void*)(2 * sizeof(float))); + + // 6. 
Wrap output texture for Flutter + m_texture = + fl_my_texture_gl_new(GL_TEXTURE_2D, m_output_texture, width, height); fl_texture_registrar_register_texture(m_texture_registrar, FL_TEXTURE(m_texture)); fl_texture_registrar_mark_texture_frame_available(m_texture_registrar, FL_TEXTURE(m_texture)); + camera.emitTextureId(get_texture_id()); +} + +void CameraTextureImageEventHandler::OnImageEventHandlerDeregistered( + Pylon::CInstantCamera& _) { + camera.emitTextureId(-1); } void CameraTextureImageEventHandler::OnImageGrabbed( Pylon::CInstantCamera& _, const Pylon::CGrabResultPtr& ptr) { - if (!m_texture) { + if (!m_texture || !ptr->GrabSucceeded()) { + std::cerr << "Error: Grab failed or texture not ready." << std::endl; return; } - if (!ptr->GrabSucceeded()) { - std::cerr << "Error grabbing image" << std::endl; - return; + gdk_gl_context_make_current(m_gl_context); + + const int width = ptr->GetWidth(); + const int height = ptr->GetHeight(); + + if (ptr->GetPixelType() == Pylon::PixelType_Mono8) { + // Upload to input texture (single channel) + glBindTexture(GL_TEXTURE_2D, m_input_texture); + + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RED, + GL_UNSIGNED_BYTE, ptr->GetBuffer()); + + // Use shader to render to output texture via FBO + glBindFramebuffer(GL_FRAMEBUFFER, m_fbo); + + glViewport(0, 0, width, height); + glUseProgram(m_shader_program); + + glBindVertexArray(m_vao); + glActiveTexture(GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D, m_input_texture); + glUniform1i(glGetUniformLocation(m_shader_program, "monoTexture"), 0); + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + glBindFramebuffer(GL_FRAMEBUFFER, 0); // unbind FBO + } else { + // RGB format: write directly to output texture + glBindTexture(GL_TEXTURE_2D, m_output_texture); + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGB, + GL_UNSIGNED_BYTE, ptr->GetBuffer()); } - gdk_gl_context_make_current(m_gl_context); - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, ptr->GetWidth(), ptr->GetHeight(), - 
GL_RGB, GL_UNSIGNED_BYTE, ptr->GetBuffer()); glFlush(); + // Mark the output texture as new frame available for Flutter fl_texture_registrar_mark_texture_frame_available(m_texture_registrar, FL_TEXTURE(m_texture)); } diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h index 3c6f922ab78f..ed6040bb9c15 100644 --- a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h +++ b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h @@ -38,6 +38,12 @@ class CameraTextureImageEventHandler : public Pylon::CImageEventHandler { FlTextureRegistrar* m_texture_registrar; GdkGLContext* m_gl_context; + GLuint m_input_texture = 0; + GLuint m_output_texture = 0; + GLuint m_fbo = 0; + GLuint m_shader_program = 0; + GLuint m_vao = 0, m_vbo = 0; + public: CameraTextureImageEventHandler(const Camera& camera, FlPluginRegistrar* registrar); @@ -50,6 +56,8 @@ class CameraTextureImageEventHandler : public Pylon::CImageEventHandler { void OnImageGrabbed(Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& ptr) override; + + void OnImageEventHandlerDeregistered(Pylon::CInstantCamera& camera) override; }; #endif // CAMERA_TEXTURE_IMAGE_EVENT_HANDLER_H_ diff --git a/packages/camera/camera_linux/linux/messages.g.cc b/packages/camera/camera_linux/linux/messages.g.cc index 11f8816768b5..b9bfa4472475 100644 --- a/packages/camera/camera_linux/linux/messages.g.cc +++ b/packages/camera/camera_linux/linux/messages.g.cc @@ -1684,6 +1684,45 @@ static CameraLinuxCameraApiSetImageFileFormatResponse* camera_linux_camera_api_s return self; } +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, CAMERA_LINUX, CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE, GObject) + +struct _CameraLinuxCameraApiSetImageFormatGroupResponse { + GObject parent_instance; + + FlValue* value; +}; + 
+G_DEFINE_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, G_TYPE_OBJECT) + +static void camera_linux_camera_api_set_image_format_group_response_dispose(GObject* object) { + CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(object); + g_clear_pointer(&self->value, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_api_set_image_format_group_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_api_set_image_format_group_response_init(CameraLinuxCameraApiSetImageFormatGroupResponse* self) { +} + +static void camera_linux_camera_api_set_image_format_group_response_class_init(CameraLinuxCameraApiSetImageFormatGroupResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_image_format_group_response_dispose; +} + +static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new() { + CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_null()); + return self; +} + +static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr)); + self->value = fl_value_new_list(); + fl_value_append_take(self->value, fl_value_new_string(code)); + fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); + fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); + return self; +} + struct _CameraLinuxCameraApi { GObject parent_instance; @@ -2106,6 +2145,21 @@ static void camera_linux_camera_api_set_image_file_format_cb(FlBasicMessageChann self->vtable->set_image_file_format(format, handle, self->user_data); } +static void camera_linux_camera_api_set_image_format_group_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { + CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); + + if (self->vtable == nullptr || self->vtable->set_image_format_group == nullptr) { + return; + } + + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformImageFormatGroup image_format_group = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); + g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); + self->vtable->set_image_format_group(camera_id, image_format_group, handle, self->user_data); +} + void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix, const CameraLinuxCameraApiVTable* vtable, gpointer user_data, GDestroyNotify user_data_free_func) { g_autofree gchar* dot_suffix = suffix != nullptr ? 
g_strdup_printf(".%s", suffix) : g_strdup(""); g_autoptr(CameraLinuxCameraApi) api_data = camera_linux_camera_api_new(vtable, user_data, user_data_free_func); @@ -2207,6 +2261,9 @@ void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, c g_autofree gchar* set_image_file_format_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_image_file_format_channel = fl_basic_message_channel_new(messenger, set_image_file_format_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_image_file_format_channel, camera_linux_camera_api_set_image_file_format_cb, g_object_ref(api_data), g_object_unref); + g_autofree gchar* set_image_format_group_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_image_format_group_channel = fl_basic_message_channel_new(messenger, set_image_format_group_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_image_format_group_channel, camera_linux_camera_api_set_image_format_group_cb, g_object_ref(api_data), g_object_unref); } void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, const gchar* suffix) { @@ -2309,6 +2366,9 @@ void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, g_autofree gchar* set_image_file_format_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_image_file_format_channel = fl_basic_message_channel_new(messenger, set_image_file_format_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_image_file_format_channel, nullptr, nullptr, nullptr); + g_autofree gchar* set_image_format_group_channel_name = 
g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup%s", dot_suffix); + g_autoptr(FlBasicMessageChannel) set_image_format_group_channel = fl_basic_message_channel_new(messenger, set_image_format_group_channel_name, FL_MESSAGE_CODEC(codec)); + fl_basic_message_channel_set_message_handler(set_image_format_group_channel, nullptr, nullptr, nullptr); } void camera_linux_camera_api_respond_get_available_cameras_names(CameraLinuxCameraApiResponseHandle* response_handle, FlValue* return_value) { @@ -2823,6 +2883,22 @@ void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCame } } +void camera_linux_camera_api_respond_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiSetImageFormatGroupResponse) response = camera_linux_camera_api_set_image_format_group_response_new(); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFormatGroup", error->message); + } +} + +void camera_linux_camera_api_respond_error_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { + g_autoptr(CameraLinuxCameraApiSetImageFormatGroupResponse) response = camera_linux_camera_api_set_image_format_group_response_new_error(code, message, details); + g_autoptr(GError) error = nullptr; + if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { + g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFormatGroup", error->message); + } +} + struct _CameraLinuxCameraEventApi { GObject parent_instance; @@ -2932,6 +3008,85 @@ CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_init return 
camera_linux_camera_event_api_initialized_response_new(response); } +struct _CameraLinuxCameraEventApiTextureIdResponse { + GObject parent_instance; + + FlValue* error; +}; + +G_DEFINE_TYPE(CameraLinuxCameraEventApiTextureIdResponse, camera_linux_camera_event_api_texture_id_response, G_TYPE_OBJECT) + +static void camera_linux_camera_event_api_texture_id_response_dispose(GObject* object) { + CameraLinuxCameraEventApiTextureIdResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(object); + g_clear_pointer(&self->error, fl_value_unref); + G_OBJECT_CLASS(camera_linux_camera_event_api_texture_id_response_parent_class)->dispose(object); +} + +static void camera_linux_camera_event_api_texture_id_response_init(CameraLinuxCameraEventApiTextureIdResponse* self) { +} + +static void camera_linux_camera_event_api_texture_id_response_class_init(CameraLinuxCameraEventApiTextureIdResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_event_api_texture_id_response_dispose; +} + +static CameraLinuxCameraEventApiTextureIdResponse* camera_linux_camera_event_api_texture_id_response_new(FlValue* response) { + CameraLinuxCameraEventApiTextureIdResponse* self = CAMERA_LINUX_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(g_object_new(camera_linux_camera_event_api_texture_id_response_get_type(), nullptr)); + if (fl_value_get_length(response) > 1) { + self->error = fl_value_ref(response); + } + return self; +} + +gboolean camera_linux_camera_event_api_texture_id_response_is_error(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), FALSE); + return self->error != nullptr; +} + +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_code(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_texture_id_response_is_error(self)); + return 
fl_value_get_string(fl_value_get_list_value(self->error, 0)); +} + +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_message(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_texture_id_response_is_error(self)); + return fl_value_get_string(fl_value_get_list_value(self->error, 1)); +} + +FlValue* camera_linux_camera_event_api_texture_id_response_get_error_details(CameraLinuxCameraEventApiTextureIdResponse* self) { + g_return_val_if_fail(CAMERA_LINUX_IS_CAMERA_EVENT_API_TEXTURE_ID_RESPONSE(self), nullptr); + g_assert(camera_linux_camera_event_api_texture_id_response_is_error(self)); + return fl_value_get_list_value(self->error, 2); +} + +static void camera_linux_camera_event_api_texture_id_cb(GObject* object, GAsyncResult* result, gpointer user_data) { + GTask* task = G_TASK(user_data); + g_task_return_pointer(task, result, g_object_unref); +} + +void camera_linux_camera_event_api_texture_id(CameraLinuxCameraEventApi* self, int64_t texture_id, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { + g_autoptr(FlValue) args = fl_value_new_list(); + fl_value_append_take(args, fl_value_new_int(texture_id)); + g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.textureId%s", self->suffix); + g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); + FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); + GTask* task = g_task_new(self, cancellable, callback, user_data); + g_task_set_task_data(task, channel, g_object_unref); + fl_basic_message_channel_send(channel, args, cancellable, camera_linux_camera_event_api_texture_id_cb, task); +} + +CameraLinuxCameraEventApiTextureIdResponse* camera_linux_camera_event_api_texture_id_finish(CameraLinuxCameraEventApi* self, 
GAsyncResult* result, GError** error) { + g_autoptr(GTask) task = G_TASK(result); + GAsyncResult* r = G_ASYNC_RESULT(g_task_propagate_pointer(task, nullptr)); + FlBasicMessageChannel* channel = FL_BASIC_MESSAGE_CHANNEL(g_task_get_task_data(task)); + g_autoptr(FlValue) response = fl_basic_message_channel_send_finish(channel, r, error); + if (response == nullptr) { + return nullptr; + } + return camera_linux_camera_event_api_texture_id_response_new(response); +} + struct _CameraLinuxCameraEventApiErrorResponse { GObject parent_instance; diff --git a/packages/camera/camera_linux/linux/messages.g.h b/packages/camera/camera_linux/linux/messages.g.h index a3cbb7cd9c3f..76798dd42712 100644 --- a/packages/camera/camera_linux/linux/messages.g.h +++ b/packages/camera/camera_linux/linux/messages.g.h @@ -77,13 +77,13 @@ typedef enum { /** * CameraLinuxPlatformImageFormatGroup: - * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_BGRA8888: - * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_YUV420: + * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8: + * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8: * */ typedef enum { - CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_BGRA8888 = 0, - CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_YUV420 = 1 + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8 = 0, + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8 = 1 } CameraLinuxPlatformImageFormatGroup; /** @@ -296,6 +296,7 @@ typedef struct { void (*resume_preview)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*update_description_while_recording)(const gchar* camera_name, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*set_image_file_format)(CameraLinuxPlatformImageFileFormat format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_image_format_group)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format_group, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); } 
CameraLinuxCameraApiVTable; /** @@ -938,6 +939,25 @@ void camera_linux_camera_api_respond_set_image_file_format(CameraLinuxCameraApiR */ void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); +/** + * camera_linux_camera_api_respond_set_image_format_group: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * + * Responds to CameraApi.setImageFormatGroup. + */ +void camera_linux_camera_api_respond_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle); + +/** + * camera_linux_camera_api_respond_error_set_image_format_group: + * @response_handle: a #CameraLinuxCameraApiResponseHandle. + * @code: error code. + * @message: error message. + * @details: (allow-none): error details or %NULL. + * + * Responds with an error to CameraApi.setImageFormatGroup. + */ +void camera_linux_camera_api_respond_error_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); + G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiInitializedResponse, camera_linux_camera_event_api_initialized_response, CAMERA_LINUX, CAMERA_EVENT_API_INITIALIZED_RESPONSE, GObject) /** @@ -980,6 +1000,48 @@ const gchar* camera_linux_camera_event_api_initialized_response_get_error_messag */ FlValue* camera_linux_camera_event_api_initialized_response_get_error_details(CameraLinuxCameraEventApiInitializedResponse* response); +G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiTextureIdResponse, camera_linux_camera_event_api_texture_id_response, CAMERA_LINUX, CAMERA_EVENT_API_TEXTURE_ID_RESPONSE, GObject) + +/** + * camera_linux_camera_event_api_texture_id_response_is_error: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Checks if a response to CameraEventApi.textureId is an error. + * + * Returns: a %TRUE if this response is an error. 
+ */ +gboolean camera_linux_camera_event_api_texture_id_response_is_error(CameraLinuxCameraEventApiTextureIdResponse* response); + +/** + * camera_linux_camera_event_api_texture_id_response_get_error_code: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Get the error code for this response. + * + * Returns: an error code or %NULL if not an error. + */ +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_code(CameraLinuxCameraEventApiTextureIdResponse* response); + +/** + * camera_linux_camera_event_api_texture_id_response_get_error_message: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Get the error message for this response. + * + * Returns: an error message. + */ +const gchar* camera_linux_camera_event_api_texture_id_response_get_error_message(CameraLinuxCameraEventApiTextureIdResponse* response); + +/** + * camera_linux_camera_event_api_texture_id_response_get_error_details: + * @response: a #CameraLinuxCameraEventApiTextureIdResponse. + * + * Get the error details for this response. + * + * Returns: (allow-none): an error details or %NULL. + */ +FlValue* camera_linux_camera_event_api_texture_id_response_get_error_details(CameraLinuxCameraEventApiTextureIdResponse* response); + G_DECLARE_FINAL_TYPE(CameraLinuxCameraEventApiErrorResponse, camera_linux_camera_event_api_error_response, CAMERA_LINUX, CAMERA_EVENT_API_ERROR_RESPONSE, GObject) /** @@ -1067,6 +1129,29 @@ void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* api, C */ CameraLinuxCameraEventApiInitializedResponse* camera_linux_camera_event_api_initialized_finish(CameraLinuxCameraEventApi* api, GAsyncResult* result, GError** error); +/** + * camera_linux_camera_event_api_texture_id: + * @api: a #CameraLinuxCameraEventApi. + * @texture_id: parameter for this method. + * @cancellable: (allow-none): a #GCancellable or %NULL. 
+ * @callback: (scope async): (allow-none): a #GAsyncReadyCallback to call when the call is complete or %NULL to ignore the response. + * @user_data: (closure): user data to pass to @callback. + * + */ +void camera_linux_camera_event_api_texture_id(CameraLinuxCameraEventApi* api, int64_t texture_id, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data); + +/** + * camera_linux_camera_event_api_texture_id_finish: + * @api: a #CameraLinuxCameraEventApi. + * @result: a #GAsyncResult. + * @error: (allow-none): #GError location to store the error occurring, or %NULL to ignore. + * + * Completes a camera_linux_camera_event_api_texture_id() call. + * + * Returns: a #CameraLinuxCameraEventApiTextureIdResponse or %NULL on error. + */ +CameraLinuxCameraEventApiTextureIdResponse* camera_linux_camera_event_api_texture_id_finish(CameraLinuxCameraEventApi* api, GAsyncResult* result, GError** error); + /** * camera_linux_camera_event_api_error: * @api: a #CameraLinuxCameraEventApi. diff --git a/packages/camera/camera_linux/pigeons/messages.dart b/packages/camera/camera_linux/pigeons/messages.dart index 706bfe692530..f9a02a24e2e6 100644 --- a/packages/camera/camera_linux/pigeons/messages.dart +++ b/packages/camera/camera_linux/pigeons/messages.dart @@ -56,8 +56,8 @@ enum PlatformImageFileFormat { // Pigeon version of the subset of ImageFormatGroup supported on iOS. enum PlatformImageFormatGroup { - bgra8888, - yuv420, + rgb8, + mono8, } enum PlatformResolutionPreset { @@ -248,6 +248,11 @@ abstract class CameraApi { /// Sets the file format used for taking pictures. @async void setImageFileFormat(PlatformImageFileFormat format); + + /// Sets the image format group. + @async + void setImageFormatGroup( + int cameraId, PlatformImageFormatGroup imageFormatGroup);
void initialized(PlatformCameraState initialState); + void textureId(int textureId); + /// Called when an error occurs in the camera. /// /// This should be used for errors that occur outside of the context of From d6ebfddb5ba41f79dbf67b257f05a230d8fa658e Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Mon, 9 Jun 2025 15:15:45 -0400 Subject: [PATCH 12/21] camera take picture --- .../camera_linux/lib/src/linux_camera.dart | 92 +- .../camera_linux/lib/src/messages.g.dart | 594 +----- .../camera/camera_linux/linux/CMakeLists.txt | 5 + packages/camera/camera_linux/linux/camera.cpp | 115 +- packages/camera/camera_linux/linux/camera.h | 34 + .../camera_linux/linux/camera_host_plugin.cpp | 69 +- .../camera_linux/linux/camera_host_plugin.h | 151 +- ...era_video_recorder_image_event_handler.cpp | 0 ...amera_video_recorder_image_event_handler.h | 35 + .../camera/camera_linux/linux/messages.g.cc | 1828 +---------------- .../camera/camera_linux/linux/messages.g.h | 469 +---- .../camera/camera_linux/pigeons/messages.dart | 117 +- packages/camera/camera_linux/pubspec.yaml | 1 + 13 files changed, 442 insertions(+), 3068 deletions(-) create mode 100644 packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp create mode 100644 packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart index 37104c6e704b..27a649546595 100644 --- a/packages/camera/camera_linux/lib/src/linux_camera.dart +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -5,6 +5,7 @@ import 'package:camera_linux/src/messages.g.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; +import 'package:path_provider/path_provider.dart'; import 'package:stream_transform/stream_transform.dart'; class CameraLinux extends CameraPlatform 
{ @@ -184,13 +185,21 @@ class CameraLinux extends CameraPlatform { } @override - Future takePicture(int cameraId) { - throw UnimplementedError('takePicture() is not implemented.'); + Future takePicture(int cameraId) async { + try { + final directory = await getTemporaryDirectory(); + final uuid = DateTime.now().millisecondsSinceEpoch.toString(); + final path = '${directory.path}/$uuid.jpg'; + await _hostApi.takePicture(cameraId, path); + return XFile(path); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } } @override - Future prepareForVideoRecording() { - throw UnimplementedError('prepareForVideoRecording() is not implemented.'); + Future prepareForVideoRecording() async { +    // No-op for Linux; no preparation is needed. } @override @@ -219,13 +228,23 @@ class CameraLinux extends CameraPlatform { } @override - Future setFlashMode(int cameraId, FlashMode mode) async {} + Future setFlashMode(int cameraId, FlashMode mode) async { + // No-op for Linux, as flash mode is not supported. + } @override - Future setExposureMode(int cameraId, ExposureMode mode) async {} + Future setExposureMode(int cameraId, ExposureMode mode) async { + try { + await _hostApi.setExposureMode(cameraId, exposureModeToPlatform(mode)); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } @override - Future setExposurePoint(int cameraId, Point? point) async {} + Future setExposurePoint(int cameraId, Point? point) async { + // No-op for Linux, as exposure point is not supported.
+ } @override Future getMinExposureOffset(int cameraId) async { @@ -248,14 +267,22 @@ class CameraLinux extends CameraPlatform { } @override - Future setFocusMode(int cameraId, FocusMode mode) async {} + Future setFocusMode(int cameraId, FocusMode mode) async { + try { + await _hostApi.setFocusMode(cameraId, focusModeToPlatform(mode)); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } @override - Future setFocusPoint(int cameraId, Point? point) async {} + Future setFocusPoint(int cameraId, Point? point) async { + // No-op for Linux, as focus point is not supported. + } @override Future getMaxZoomLevel(int cameraId) async { - return 1.0; + return 0.0; } @override @@ -264,17 +291,26 @@ class CameraLinux extends CameraPlatform { } @override - Future setZoomLevel(int cameraId, double zoom) async {} + Future setZoomLevel(int cameraId, double zoom) async { + // No-op for Linux, as zoom is not supported. + } @override - Future pausePreview(int cameraId) async {} + Future pausePreview(int cameraId) async { + throw UnimplementedError('pausePreview() is not implemented.'); + } @override - Future resumePreview(int cameraId) async {} + Future resumePreview(int cameraId) async { + throw UnimplementedError('resumePreview() is not implemented.'); + } @override Future setDescriptionWhileRecording( - CameraDescription description) async {} + CameraDescription description) async { + throw UnimplementedError( + 'setDescriptionWhileRecording() is not implemented.'); + } @override Widget buildPreview(int cameraId) { @@ -305,8 +341,20 @@ class CameraLinux extends CameraPlatform { } @override - Future dispose(int cameraId) async {} + Future dispose(int cameraId) async { + // Remove the handler for this camera. + final HostCameraMessageHandler? 
handler = + hostCameraHandlers.remove(cameraId); + handler?.dispose(); + + try { + await _hostApi.dispose(cameraId); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + /// The following methods are not implemented for Linux, as only jpeg is supported @override Future setImageFileFormat(int cameraId, ImageFileFormat format) async {} @@ -416,3 +464,17 @@ FocusMode focusModeFromPlatform(PlatformFocusMode mode) { PlatformFocusMode.locked => FocusMode.locked, }; } + +PlatformFocusMode focusModeToPlatform(FocusMode mode) { + return switch (mode) { + FocusMode.auto => PlatformFocusMode.auto, + FocusMode.locked => PlatformFocusMode.locked, + }; +} + +PlatformExposureMode exposureModeToPlatform(ExposureMode mode) { + return switch (mode) { + ExposureMode.auto => PlatformExposureMode.auto, + ExposureMode.locked => PlatformExposureMode.locked, + }; +} diff --git a/packages/camera/camera_linux/lib/src/messages.g.dart b/packages/camera/camera_linux/lib/src/messages.g.dart index 82c333f56b3a..ebfd0a9bc5f9 100644 --- a/packages/camera/camera_linux/lib/src/messages.g.dart +++ b/packages/camera/camera_linux/lib/src/messages.g.dart @@ -52,12 +52,6 @@ enum PlatformFocusMode { locked, } -/// Pigeon version of ImageFileFormat. 
-enum PlatformImageFileFormat { - jpeg, - heif, -} - enum PlatformImageFormatGroup { rgb8, mono8, @@ -190,23 +184,20 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is PlatformFocusMode) { buffer.putUint8(132); writeValue(buffer, value.index); - } else if (value is PlatformImageFileFormat) { - buffer.putUint8(133); - writeValue(buffer, value.index); } else if (value is PlatformImageFormatGroup) { - buffer.putUint8(134); + buffer.putUint8(133); writeValue(buffer, value.index); } else if (value is PlatformResolutionPreset) { - buffer.putUint8(135); + buffer.putUint8(134); writeValue(buffer, value.index); } else if (value is PlatformSize) { - buffer.putUint8(136); + buffer.putUint8(135); writeValue(buffer, value.encode()); } else if (value is PlatformCameraState) { - buffer.putUint8(137); + buffer.putUint8(136); writeValue(buffer, value.encode()); } else if (value is PlatformPoint) { - buffer.putUint8(138); + buffer.putUint8(137); writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); @@ -229,19 +220,16 @@ class _PigeonCodec extends StandardMessageCodec { final int? value = readValue(buffer) as int?; return value == null ? null : PlatformFocusMode.values[value]; case 133: - final int? value = readValue(buffer) as int?; - return value == null ? null : PlatformImageFileFormat.values[value]; - case 134: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformImageFormatGroup.values[value]; - case 135: + case 134: final int? value = readValue(buffer) as int?; return value == null ? null : PlatformResolutionPreset.values[value]; - case 136: + case 135: return PlatformSize.decode(readValue(buffer)!); - case 137: + case 136: return PlatformCameraState.decode(readValue(buffer)!); - case 138: + case 137: return PlatformPoint.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); @@ -341,52 +329,6 @@ class CameraApi { } } - /// Begins streaming frames from the camera. 
- Future startImageStream() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.startImageStream$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Stops streaming frames from the camera. - Future stopImageStream() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - /// Get the texture ID for the camera with the given ID. Future getTextureId(int cameraId) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getTextureId$pigeonVar_messageChannelSuffix'; @@ -410,32 +352,6 @@ class CameraApi { } } - /// Called by the Dart side of the plugin when it has received the last image - /// frame sent. - /// - /// This is used to throttle sending frames across the channel. 
- Future receivedImageStreamData() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - /// Indicates that the given camera is no longer being used on the Dart side, /// and any associated resources can be cleaned up. Future dispose(int cameraId) async { @@ -460,56 +376,9 @@ class CameraApi { } } - /// Locks the camera capture to the current device orientation. - Future lockCaptureOrientation(PlatformDeviceOrientation orientation) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.lockCaptureOrientation$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([orientation]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Unlocks camera capture orientation, allowing it to automatically adapt to - /// device orientation. 
- Future unlockCaptureOrientation() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.unlockCaptureOrientation$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - /// Takes a picture with the current settings, and returns the path to the /// resulting file. - Future takePicture() async { + Future takePicture(int cameraId, String path) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.takePicture$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -517,35 +386,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as String?)!; - } - } - - /// Does any preprocessing necessary before beginning to record video. 
- Future prepareForVideoRecording() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.prepareForVideoRecording$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; + await pigeonVar_channel.send([cameraId, path]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { @@ -561,7 +402,7 @@ class CameraApi { /// Begins recording video, optionally enabling streaming to Dart at the same /// time. - Future startVideoRecording(bool enableStream) async { + Future startVideoRecording(int cameraId, bool enableStream) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -569,7 +410,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? pigeonVar_replyList = - await pigeonVar_channel.send([enableStream]) as List?; + await pigeonVar_channel.send([cameraId, enableStream]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { @@ -584,7 +425,7 @@ class CameraApi { } /// Stops recording video, and results the path to the resulting file. - Future stopVideoRecording() async { + Future stopVideoRecording(int cameraId) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -592,7 +433,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? 
pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; + await pigeonVar_channel.send([cameraId]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { @@ -611,77 +452,8 @@ class CameraApi { } } - /// Pauses video recording. - Future pauseVideoRecording() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.pauseVideoRecording$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Resumes a previously paused video recording. - Future resumeVideoRecording() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.resumeVideoRecording$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Switches the camera to the given flash mode. 
- Future setFlashMode(PlatformFlashMode mode) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setFlashMode$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([mode]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - /// Switches the camera to the given exposure mode. - Future setExposureMode(PlatformExposureMode mode) async { + Future setExposureMode(int cameraId, PlatformExposureMode mode) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -689,137 +461,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? pigeonVar_replyList = - await pigeonVar_channel.send([mode]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Anchors auto-exposure to the given point in (0,1) coordinate space. - /// - /// A null value resets to the default exposure point. - Future setExposurePoint(PlatformPoint? 
point) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setExposurePoint$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([point]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Sets the lens position manually to the given value. - /// The value should be between 0 and 1. - /// 0 means the lens is at the minimum position. - /// 1 means the lens is at the maximum position. - Future setLensPosition(double position) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setLensPosition$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([position]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Returns the minimum exposure offset supported by the camera. 
- Future getMinExposureOffset() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMinExposureOffset$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } - } - - /// Returns the maximum exposure offset supported by the camera. - Future getMaxExposureOffset() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMaxExposureOffset$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } - } - - /// Sets the exposure offset manually to the given value. - Future setExposureOffset(double offset) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setExposureOffset$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([offset]) as List?; + await pigeonVar_channel.send([cameraId, mode]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { @@ -834,7 +476,7 @@ class CameraApi { } /// Switches the camera to the given focus mode. - Future setFocusMode(PlatformFocusMode mode) async { + Future setFocusMode(int cameraId, PlatformFocusMode mode) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -842,205 +484,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? pigeonVar_replyList = - await pigeonVar_channel.send([mode]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Anchors auto-focus to the given point in (0,1) coordinate space. - /// - /// A null value resets to the default focus point. - Future setFocusPoint(PlatformPoint? point) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setFocusPoint$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([point]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Returns the minimum zoom level supported by the camera. - Future getMinZoomLevel() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMinZoomLevel$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } - } - - /// Returns the maximum zoom level supported by the camera. - Future getMaxZoomLevel() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.getMaxZoomLevel$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } - } - - /// Sets the zoom factor. - Future setZoomLevel(double zoom) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setZoomLevel$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? 
pigeonVar_replyList = - await pigeonVar_channel.send([zoom]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Pauses streaming of preview frames. - Future pausePreview() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.pausePreview$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Resumes a previously paused preview stream. - Future resumePreview() async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.resumePreview$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send(null) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Changes the camera used while recording video. - /// - /// This should only be called while video recording is active. - Future updateDescriptionWhileRecording(String cameraName) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.updateDescriptionWhileRecording$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([cameraName]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } - } - - /// Sets the file format used for taking pictures. - Future setImageFileFormat(PlatformImageFileFormat format) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( - pigeonVar_channelName, - pigeonChannelCodec, - binaryMessenger: pigeonVar_binaryMessenger, - ); - final List? 
pigeonVar_replyList = - await pigeonVar_channel.send([format]) as List?; + await pigeonVar_channel.send([cameraId, mode]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index 3e4ea240da32..dfa6c7bf8998 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -50,6 +50,7 @@ list(APPEND PLUGIN_SOURCES "camera_plugin.cpp" "camera_host_plugin.cpp" "camera_texture_image_event_handler.cpp" + "camera_video_recorder_image_event_handler.cpp" "camera.cpp" "messages.g.cc" @@ -92,6 +93,10 @@ target_link_libraries(${PLUGIN_NAME} ) find_package(PkgConfig REQUIRED) +# --- OpenCV Integration --- +find_package(OpenCV REQUIRED) +include_directories(${OpenCV_INCLUDE_DIRS}) +target_link_libraries(${PLUGIN_NAME} PRIVATE ${OpenCV_LIBS}) # List of absolute paths to libraries that should be bundled with the plugin. 
# This list could contain prebuilt libraries, or libraries created by an diff --git a/packages/camera/camera_linux/linux/camera.cpp b/packages/camera/camera_linux/linux/camera.cpp index 234f7cc52b97..be21e2d74f35 100644 --- a/packages/camera/camera_linux/linux/camera.cpp +++ b/packages/camera/camera_linux/linux/camera.cpp @@ -1,5 +1,7 @@ #include "camera.h" +#include + #include "camera_texture_image_event_handler.h" Camera::Camera(Pylon::IPylonDevice* device, int64_t camera_id, @@ -73,34 +75,20 @@ void Camera::initialize(CameraLinuxPlatformImageFormatGroup imageFormat) { void Camera::setImageFormatGroup( CameraLinuxPlatformImageFormatGroup imageFormatGroup) { - if (!camera) return; - bool wasGrabbing = camera->IsGrabbing(); - if (wasGrabbing) { - camera->StopGrabbing(); - camera->DeregisterImageEventHandler(cameraTextureImageEventHandler.get()); - cameraTextureImageEventHandler.reset(); - } - GenApi::INodeMap& nodemap = camera->GetNodeMap(); - switch (imageFormatGroup) { - case CameraLinuxPlatformImageFormatGroup:: - CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8: - Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("Mono8"); - break; - case CameraLinuxPlatformImageFormatGroup:: - CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8: - default: - Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("RGB8"); - break; - } - if (wasGrabbing) { - cameraTextureImageEventHandler = - std::make_unique(*this, registrar); - camera->RegisterImageEventHandler(cameraTextureImageEventHandler.get(), - Pylon::RegistrationMode_Append, - Pylon::Cleanup_None); - camera->StartGrabbing(Pylon::GrabStrategy_LatestImages, - Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); - } + CAMERA_CONFIG_LOCK({ + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + switch (imageFormatGroup) { + case CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_MONO8: + Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("Mono8"); + break; + case 
CameraLinuxPlatformImageFormatGroup:: + CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8: + default: + Pylon::CEnumParameter(nodemap, "PixelFormat").SetValue("RGB8"); + break; + } + }); } int64_t Camera::getTextureId() { @@ -108,6 +96,36 @@ int64_t Camera::getTextureId() { return cameraTextureImageEventHandler->get_texture_id(); } +void Camera::takePicture(std::string file_path) { + CAMERA_CONFIG_LOCK( + Pylon::CGrabResultPtr grabResult; + + if (camera->IsGrabbing()) { camera->StopGrabbing(); } + + if (!camera->GrabOne(Pylon::INFINITE, grabResult, + Pylon::TimeoutHandling_Return)) { + std::cerr << "Failed to grab image within timeout." << std::endl; + return; + } + + if (!grabResult.IsValid() || !grabResult->GrabSucceeded()) { + std::cerr << "Failed to grab image." << std::endl; + return; + }; + Pylon::CPylonImage image; image.AttachGrabResultBuffer(grabResult); + bool isMono = image.GetPixelType() == Pylon::PixelType_Mono8 || + image.GetPixelType() == Pylon::PixelType_Mono12 || + image.GetPixelType() == Pylon::PixelType_Mono16; + + cv::Mat mat(grabResult->GetHeight(), grabResult->GetWidth(), + isMono ? CV_8UC1 : CV_8UC3, (uint8_t*)image.GetBuffer()); + cv::Mat bgr; + cv::cvtColor(mat, bgr, isMono ? 
cv::COLOR_GRAY2BGR : cv::COLOR_RGB2BGR); + cv::imwrite(file_path, bgr); + + ); +} + void camera_linux_camera_event_api_initialized_callback(GObject* object, GAsyncResult* result, gpointer user_data) {} @@ -171,3 +189,44 @@ Camera& Camera::setResolutionPreset( resolution_preset = preset; return *this; } + +void Camera::setExposureMode(CameraLinuxPlatformExposureMode mode) { + CAMERA_CONFIG_LOCK({ + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + switch (mode) { + case CameraLinuxPlatformExposureMode:: + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO: + Pylon::CEnumParameter(nodemap, "ExposureAuto").SetValue("Continuous"); + break; + case CameraLinuxPlatformExposureMode:: + CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED: + Pylon::CEnumParameter(nodemap, "ExposureAuto").SetValue("Off"); + break; + default: + Pylon::CEnumParameter(nodemap, "ExposureAuto").SetValue("Continuous"); + break; + } + exposure_mode = mode; + emitState(); + }); +} + +void Camera::setFocusMode(CameraLinuxPlatformFocusMode mode) { + CAMERA_CONFIG_LOCK({ + GenApi::INodeMap& nodemap = camera->GetNodeMap(); + switch (mode) { + case CameraLinuxPlatformFocusMode::CAMERA_LINUX_PLATFORM_FOCUS_MODE_AUTO: + Pylon::CEnumParameter(nodemap, "FocusMode").SetValue("Auto"); + break; + case CameraLinuxPlatformFocusMode:: + CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED: + Pylon::CEnumParameter(nodemap, "FocusMode").SetValue("Locked"); + break; + default: + Pylon::CEnumParameter(nodemap, "FocusMode").SetValue("Auto"); + break; + } + focus_mode = mode; + emitState(); + }); +} diff --git a/packages/camera/camera_linux/linux/camera.h b/packages/camera/camera_linux/linux/camera.h index e25e2e7ed586..05ed36d49e07 100644 --- a/packages/camera/camera_linux/linux/camera.h +++ b/packages/camera/camera_linux/linux/camera.h @@ -2,6 +2,8 @@ #ifndef CAMERA_H_ #define CAMERA_H_ +#include + #include "flutter_linux/flutter_linux.h" #include "messages.g.h" @@ -35,8 +37,14 @@ class Camera { int64_t getTextureId(); + void 
takePicture(std::string file_path); + // void startVideoRecording(); + // void stopVideoRecording(); + void setImageFormatGroup( CameraLinuxPlatformImageFormatGroup imageFormatGroup); + void setExposureMode(CameraLinuxPlatformExposureMode mode); + void setFocusMode(CameraLinuxPlatformFocusMode mode); // State public: @@ -56,4 +64,30 @@ FlPluginRegistrar* registrar; }; +#define CAMERA_CONFIG_LOCK(code) \ + do { \ + if (!camera) { \ + std::cerr << "Camera is not initialized." << std::endl; \ + return; \ + } \ + bool wasGrabbing = camera->IsGrabbing(); \ + if (wasGrabbing) { \ + camera->StopGrabbing(); \ + camera->DeregisterImageEventHandler( \ + cameraTextureImageEventHandler.get()); \ + cameraTextureImageEventHandler.reset(); \ + } \ + {code}; \ + if (wasGrabbing) { \ + cameraTextureImageEventHandler = \ + std::make_unique(*this, registrar); \ + camera->RegisterImageEventHandler(cameraTextureImageEventHandler.get(), \ + Pylon::RegistrationMode_Append, \ + Pylon::Cleanup_None); \ + camera->StartGrabbing( \ + Pylon::GrabStrategy_LatestImages, \ + Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); \ + } \ + } while (0) + #endif // CAMERA_H_ diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.cpp b/packages/camera/camera_linux/linux/camera_host_plugin.cpp index df893be75f15..2bc90ab72dcb 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.cpp +++ b/packages/camera/camera_linux/linux/camera_host_plugin.cpp @@ -10,35 +10,13 @@ CameraHostPlugin::CameraHostPlugin(FlPluginRegistrar* registrar) .get_available_cameras_names = get_available_cameras_names, .create = create, .initialize = initialize, - .start_image_stream = start_image_stream, - .stop_image_stream = stop_image_stream, .get_texture_id = get_texture_id, - .received_image_stream_data = received_image_stream_data, .dispose = dispose, - .lock_capture_orientation = lock_capture_orientation, - .unlock_capture_orientation = unlock_capture_orientation, .take_picture = 
take_picture, - .prepare_for_video_recording = prepare_for_video_recording, .start_video_recording = start_video_recording, .stop_video_recording = stop_video_recording, - .pause_video_recording = pause_video_recording, - .resume_video_recording = resume_video_recording, - .set_flash_mode = set_flash_mode, .set_exposure_mode = set_exposure_mode, - .set_exposure_point = set_exposure_point, - .set_lens_position = set_lens_position, - .get_min_exposure_offset = get_min_exposure_offset, - .get_max_exposure_offset = get_max_exposure_offset, - .set_exposure_offset = set_exposure_offset, .set_focus_mode = set_focus_mode, - .set_focus_point = set_focus_point, - .get_min_zoom_level = get_min_zoom_level, - .get_max_zoom_level = get_max_zoom_level, - .set_zoom_level = set_zoom_level, - .pause_preview = pause_preview, - .resume_preview = resume_preview, - .update_description_while_recording = update_description_while_recording, - .set_image_file_format = set_image_file_format, .set_image_format_group = set_image_format_group, }; @@ -113,6 +91,20 @@ void CameraHostPlugin::create( }); } +void CameraHostPlugin::dispose( + int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(dispose, { + for (auto&& camera_it = cameras.begin(); camera_it != cameras.end(); + ++camera_it) { + if (camera_it->camera_id == camera_id) { + cameras.erase(camera_it); break; + } + } + CAMERA_HOST_VOID_RETURN(); + }); +} + void CameraHostPlugin::camera_linux_camera_event_api_initialized_callback( GObject* object, GAsyncResult* result, gpointer user_data) {} @@ -148,3 +140,36 @@ void CameraHostPlugin::get_texture_id( CAMERA_HOST_RETURN(&texture_id); }); } + +void CameraHostPlugin::set_exposure_mode( + int64_t camera_id, CameraLinuxPlatformExposureMode mode, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(set_exposure_mode, { + Camera& camera = get_camera_by_id(camera_id); + 
camera.setExposureMode(mode); + + CAMERA_HOST_VOID_RETURN(); + }); +} + +void CameraHostPlugin::set_focus_mode( + int64_t camera_id, CameraLinuxPlatformFocusMode mode, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(set_focus_mode, { + Camera& camera = get_camera_by_id(camera_id); + camera.setFocusMode(mode); + + CAMERA_HOST_VOID_RETURN(); + }); +} + +void CameraHostPlugin::take_picture( + int64_t camera_id, const gchar* path, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(take_picture, { + Camera& camera = get_camera_by_id(camera_id); + + camera.takePicture(std::string(path)); + CAMERA_HOST_VOID_RETURN(); + }); +} \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.h b/packages/camera/camera_linux/linux/camera_host_plugin.h index a8a19fbae493..92f5971f52c5 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.h +++ b/packages/camera/camera_linux/linux/camera_host_plugin.h @@ -26,11 +26,13 @@ &camera_linux_camera_api_respond_error_##method_name; \ code \ } catch (const Pylon::GenericException& e) { \ + std::cerr << e.GetDescription() << std::endl; \ camera_linux_camera_api_respond_error_##method_name( \ - response_handle, nullptr, e.what(), nullptr); \ + response_handle, nullptr, e.GetDescription(), nullptr); \ } catch (const std::exception& e) { \ camera_linux_camera_api_respond_error_##method_name( \ response_handle, nullptr, e.what(), nullptr); \ + std::cerr << "Exception occurred: " << e.what() << std::endl; \ } catch (...) 
{ \ camera_linux_camera_api_respond_error_##method_name( \ response_handle, nullptr, "CameraLinuxPlugin Unknown error", nullptr); \ @@ -71,162 +73,37 @@ class CameraHostPlugin { CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - static void start_image_stream( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void stop_image_stream( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void received_image_stream_data( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - static void dispose(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, - gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void lock_capture_orientation( - CameraLinuxPlatformDeviceOrientation orientation, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void unlock_capture_orientation( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } + gpointer user_data); static void get_texture_id( int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - static void take_picture(CameraLinuxCameraApiResponseHandle* response_handle, - gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void prepare_for_video_recording( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } + static void take_picture(int64_t camera_id, const gchar* path, + CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data); static void start_video_recording( - 
gboolean enable_stream, + int64_t camera_id, gboolean enable_stream, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { throw new std::runtime_error("Not Implemented"); } static void stop_video_recording( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void pause_video_recording( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void resume_video_recording( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_flash_mode( - CameraLinuxPlatformFlashMode mode, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_exposure_mode( - CameraLinuxPlatformExposureMode mode, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_exposure_point( - CameraLinuxPlatformPoint* point, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_lens_position( - double position, CameraLinuxCameraApiResponseHandle* response_handle, + int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { throw new std::runtime_error("Not Implemented"); } - static void get_min_exposure_offset( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void get_max_exposure_offset( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_exposure_offset( - double offset, 
CameraLinuxCameraApiResponseHandle* response_handle, - gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } + static void set_exposure_mode( + int64_t camera_id, CameraLinuxPlatformExposureMode mode, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); static void set_focus_mode( - CameraLinuxPlatformFocusMode mode, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_focus_point( - CameraLinuxPlatformPoint* point, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void get_min_zoom_level( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void get_max_zoom_level( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_zoom_level( - double zoom, CameraLinuxCameraApiResponseHandle* response_handle, - gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void pause_preview(CameraLinuxCameraApiResponseHandle* response_handle, - gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void resume_preview( - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void update_description_while_recording( - const gchar* camera_name, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } - - static void set_image_file_format( - CameraLinuxPlatformImageFileFormat format, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } + int64_t camera_id, 
CameraLinuxPlatformFocusMode mode, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); static void camera_linux_camera_event_api_initialized_callback( GObject* object, GAsyncResult* result, gpointer user_data); diff --git a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h new file mode 100644 index 000000000000..6d3ab39f0a97 --- /dev/null +++ b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h @@ -0,0 +1,35 @@ + +#ifndef CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_ +#define CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_ + +#include + +#include "camera.h" +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Woverloaded-virtual" +#pragma clang diagnostic ignored "-Wunused-variable" + +#include + +#pragma clang diagnostic pop + +class CameraVideoRecorderImageEventHandler : public Pylon::CImageEventHandler { + const Camera& camera; + + public: + CameraVideoRecorderImageEventHandler(const Camera& camera); + + ~CameraVideoRecorderImageEventHandler() override; + + void OnImageEventHandlerRegistered(Pylon::CInstantCamera& camera) override; + + void OnImageGrabbed(Pylon::CInstantCamera& camera, + const Pylon::CGrabResultPtr& ptr) override; + + void OnImageEventHandlerDeregistered(Pylon::CInstantCamera& camera) override; +}; + +#endif // CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_ \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/messages.g.cc b/packages/camera/camera_linux/linux/messages.g.cc index b9bfa4472475..e717f09980e3 100644 --- 
a/packages/camera/camera_linux/linux/messages.g.cc +++ b/packages/camera/camera_linux/linux/messages.g.cc @@ -120,7 +120,7 @@ gboolean camera_linux_platform_camera_state_get_focus_point_supported(CameraLinu static FlValue* camera_linux_platform_camera_state_to_list(CameraLinuxPlatformCameraState* self) { FlValue* values = fl_value_new_list(); - fl_value_append_take(values, fl_value_new_custom_object(136, G_OBJECT(self->preview_size))); + fl_value_append_take(values, fl_value_new_custom_object(135, G_OBJECT(self->preview_size))); fl_value_append_take(values, fl_value_new_custom(130, fl_value_new_int(self->exposure_mode), (GDestroyNotify)fl_value_unref)); fl_value_append_take(values, fl_value_new_custom(132, fl_value_new_int(self->focus_mode), (GDestroyNotify)fl_value_unref)); fl_value_append_take(values, fl_value_new_bool(self->exposure_point_supported)); @@ -225,40 +225,34 @@ static gboolean camera_linux_message_codec_write_camera_linux_platform_focus_mod return fl_standard_message_codec_write_value(codec, buffer, value, error); } -static gboolean camera_linux_message_codec_write_camera_linux_platform_image_file_format(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 133; - g_byte_array_append(buffer, &type, sizeof(uint8_t)); - return fl_standard_message_codec_write_value(codec, buffer, value, error); -} - static gboolean camera_linux_message_codec_write_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 134; + uint8_t type = 133; g_byte_array_append(buffer, &type, sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GByteArray* buffer, FlValue* value, GError** error) { - uint8_t type = 135; + uint8_t type = 134; g_byte_array_append(buffer, &type, 
sizeof(uint8_t)); return fl_standard_message_codec_write_value(codec, buffer, value, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_size(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformSize* value, GError** error) { - uint8_t type = 136; + uint8_t type = 135; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_size_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformCameraState* value, GError** error) { - uint8_t type = 137; + uint8_t type = 136; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_camera_state_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); } static gboolean camera_linux_message_codec_write_camera_linux_platform_point(FlStandardMessageCodec* codec, GByteArray* buffer, CameraLinuxPlatformPoint* value, GError** error) { - uint8_t type = 138; + uint8_t type = 137; g_byte_array_append(buffer, &type, sizeof(uint8_t)); g_autoptr(FlValue) values = camera_linux_platform_point_to_list(value); return fl_standard_message_codec_write_value(codec, buffer, values, error); @@ -276,16 +270,14 @@ static gboolean camera_linux_message_codec_write_value(FlStandardMessageCodec* c case 132: return camera_linux_message_codec_write_camera_linux_platform_focus_mode(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); case 133: - return camera_linux_message_codec_write_camera_linux_platform_image_file_format(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 134: return camera_linux_message_codec_write_camera_linux_platform_image_format_group(codec, buffer, 
reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 135: + case 134: return camera_linux_message_codec_write_camera_linux_platform_resolution_preset(codec, buffer, reinterpret_cast(const_cast(fl_value_get_custom_value(value))), error); - case 136: + case 135: return camera_linux_message_codec_write_camera_linux_platform_size(codec, buffer, CAMERA_LINUX_PLATFORM_SIZE(fl_value_get_custom_value_object(value)), error); - case 137: + case 136: return camera_linux_message_codec_write_camera_linux_platform_camera_state(codec, buffer, CAMERA_LINUX_PLATFORM_CAMERA_STATE(fl_value_get_custom_value_object(value)), error); - case 138: + case 137: return camera_linux_message_codec_write_camera_linux_platform_point(codec, buffer, CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value)), error); } } @@ -309,16 +301,12 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_focus_mode return fl_value_new_custom(132, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } -static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_file_format(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(133, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); -} - static FlValue* camera_linux_message_codec_read_camera_linux_platform_image_format_group(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - return fl_value_new_custom(134, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + return fl_value_new_custom(133, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_resolution_preset(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { - 
return fl_value_new_custom(135, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); + return fl_value_new_custom(134, fl_standard_message_codec_read_value(codec, buffer, offset, error), (GDestroyNotify)fl_value_unref); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_size(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -333,7 +321,7 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_size(FlSta return nullptr; } - return fl_value_new_custom_object(136, G_OBJECT(value)); + return fl_value_new_custom_object(135, G_OBJECT(value)); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_state(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -348,7 +336,7 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_camera_sta return nullptr; } - return fl_value_new_custom_object(137, G_OBJECT(value)); + return fl_value_new_custom_object(136, G_OBJECT(value)); } static FlValue* camera_linux_message_codec_read_camera_linux_platform_point(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, GError** error) { @@ -363,7 +351,7 @@ static FlValue* camera_linux_message_codec_read_camera_linux_platform_point(FlSt return nullptr; } - return fl_value_new_custom_object(138, G_OBJECT(value)); + return fl_value_new_custom_object(137, G_OBJECT(value)); } static FlValue* camera_linux_message_codec_read_value_of_type(FlStandardMessageCodec* codec, GBytes* buffer, size_t* offset, int type, GError** error) { @@ -377,16 +365,14 @@ static FlValue* camera_linux_message_codec_read_value_of_type(FlStandardMessageC case 132: return camera_linux_message_codec_read_camera_linux_platform_focus_mode(codec, buffer, offset, error); case 133: - return camera_linux_message_codec_read_camera_linux_platform_image_file_format(codec, buffer, offset, error); - case 134: return 
camera_linux_message_codec_read_camera_linux_platform_image_format_group(codec, buffer, offset, error); - case 135: + case 134: return camera_linux_message_codec_read_camera_linux_platform_resolution_preset(codec, buffer, offset, error); - case 136: + case 135: return camera_linux_message_codec_read_camera_linux_platform_size(codec, buffer, offset, error); - case 137: + case 136: return camera_linux_message_codec_read_camera_linux_platform_camera_state(codec, buffer, offset, error); - case 138: + case 137: return camera_linux_message_codec_read_camera_linux_platform_point(codec, buffer, offset, error); default: return FL_STANDARD_MESSAGE_CODEC_CLASS(camera_linux_message_codec_parent_class)->read_value_of_type(codec, buffer, offset, type, error); @@ -553,84 +539,6 @@ static CameraLinuxCameraApiInitializeResponse* camera_linux_camera_api_initializ return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStartImageStreamResponse, camera_linux_camera_api_start_image_stream_response, CAMERA_LINUX, CAMERA_API_START_IMAGE_STREAM_RESPONSE, GObject) - -struct _CameraLinuxCameraApiStartImageStreamResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiStartImageStreamResponse, camera_linux_camera_api_start_image_stream_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_start_image_stream_response_dispose(GObject* object) { - CameraLinuxCameraApiStartImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_START_IMAGE_STREAM_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_start_image_stream_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_start_image_stream_response_init(CameraLinuxCameraApiStartImageStreamResponse* self) { -} - -static void camera_linux_camera_api_start_image_stream_response_class_init(CameraLinuxCameraApiStartImageStreamResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = 
camera_linux_camera_api_start_image_stream_response_dispose; -} - -static CameraLinuxCameraApiStartImageStreamResponse* camera_linux_camera_api_start_image_stream_response_new() { - CameraLinuxCameraApiStartImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_START_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_start_image_stream_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiStartImageStreamResponse* camera_linux_camera_api_start_image_stream_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiStartImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_START_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_start_image_stream_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStopImageStreamResponse, camera_linux_camera_api_stop_image_stream_response, CAMERA_LINUX, CAMERA_API_STOP_IMAGE_STREAM_RESPONSE, GObject) - -struct _CameraLinuxCameraApiStopImageStreamResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiStopImageStreamResponse, camera_linux_camera_api_stop_image_stream_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_stop_image_stream_response_dispose(GObject* object) { - CameraLinuxCameraApiStopImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_STOP_IMAGE_STREAM_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_stop_image_stream_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_stop_image_stream_response_init(CameraLinuxCameraApiStopImageStreamResponse* self) { -} - -static void camera_linux_camera_api_stop_image_stream_response_class_init(CameraLinuxCameraApiStopImageStreamResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_stop_image_stream_response_dispose; -} - -static CameraLinuxCameraApiStopImageStreamResponse* camera_linux_camera_api_stop_image_stream_response_new() { - CameraLinuxCameraApiStopImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_STOP_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_stop_image_stream_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiStopImageStreamResponse* camera_linux_camera_api_stop_image_stream_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiStopImageStreamResponse* self = CAMERA_LINUX_CAMERA_API_STOP_IMAGE_STREAM_RESPONSE(g_object_new(camera_linux_camera_api_stop_image_stream_response_get_type(), nullptr)); - 
self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetTextureIdResponse, camera_linux_camera_api_get_texture_id_response, CAMERA_LINUX, CAMERA_API_GET_TEXTURE_ID_RESPONSE, GObject) struct _CameraLinuxCameraApiGetTextureIdResponse { @@ -670,45 +578,6 @@ static CameraLinuxCameraApiGetTextureIdResponse* camera_linux_camera_api_get_tex return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiReceivedImageStreamDataResponse, camera_linux_camera_api_received_image_stream_data_response, CAMERA_LINUX, CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE, GObject) - -struct _CameraLinuxCameraApiReceivedImageStreamDataResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiReceivedImageStreamDataResponse, camera_linux_camera_api_received_image_stream_data_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_received_image_stream_data_response_dispose(GObject* object) { - CameraLinuxCameraApiReceivedImageStreamDataResponse* self = CAMERA_LINUX_CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_received_image_stream_data_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_received_image_stream_data_response_init(CameraLinuxCameraApiReceivedImageStreamDataResponse* self) { -} - -static void camera_linux_camera_api_received_image_stream_data_response_class_init(CameraLinuxCameraApiReceivedImageStreamDataResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_received_image_stream_data_response_dispose; -} - -static CameraLinuxCameraApiReceivedImageStreamDataResponse* 
camera_linux_camera_api_received_image_stream_data_response_new() { - CameraLinuxCameraApiReceivedImageStreamDataResponse* self = CAMERA_LINUX_CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE(g_object_new(camera_linux_camera_api_received_image_stream_data_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiReceivedImageStreamDataResponse* camera_linux_camera_api_received_image_stream_data_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiReceivedImageStreamDataResponse* self = CAMERA_LINUX_CAMERA_API_RECEIVED_IMAGE_STREAM_DATA_RESPONSE(g_object_new(camera_linux_camera_api_received_image_stream_data_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiDisposeResponse, camera_linux_camera_api_dispose_response, CAMERA_LINUX, CAMERA_API_DISPOSE_RESPONSE, GObject) struct _CameraLinuxCameraApiDisposeResponse { @@ -748,84 +617,6 @@ static CameraLinuxCameraApiDisposeResponse* camera_linux_camera_api_dispose_resp return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiLockCaptureOrientationResponse, camera_linux_camera_api_lock_capture_orientation_response, CAMERA_LINUX, CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE, GObject) - -struct _CameraLinuxCameraApiLockCaptureOrientationResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiLockCaptureOrientationResponse, camera_linux_camera_api_lock_capture_orientation_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_lock_capture_orientation_response_dispose(GObject* object) { - CameraLinuxCameraApiLockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_lock_capture_orientation_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_lock_capture_orientation_response_init(CameraLinuxCameraApiLockCaptureOrientationResponse* self) { -} - -static void camera_linux_camera_api_lock_capture_orientation_response_class_init(CameraLinuxCameraApiLockCaptureOrientationResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_lock_capture_orientation_response_dispose; -} - -static CameraLinuxCameraApiLockCaptureOrientationResponse* camera_linux_camera_api_lock_capture_orientation_response_new() { - CameraLinuxCameraApiLockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_lock_capture_orientation_response_get_type(), nullptr)); - self->value = 
fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiLockCaptureOrientationResponse* camera_linux_camera_api_lock_capture_orientation_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiLockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_LOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_lock_capture_orientation_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiUnlockCaptureOrientationResponse, camera_linux_camera_api_unlock_capture_orientation_response, CAMERA_LINUX, CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE, GObject) - -struct _CameraLinuxCameraApiUnlockCaptureOrientationResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiUnlockCaptureOrientationResponse, camera_linux_camera_api_unlock_capture_orientation_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_unlock_capture_orientation_response_dispose(GObject* object) { - CameraLinuxCameraApiUnlockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_unlock_capture_orientation_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_unlock_capture_orientation_response_init(CameraLinuxCameraApiUnlockCaptureOrientationResponse* self) { -} - -static void camera_linux_camera_api_unlock_capture_orientation_response_class_init(CameraLinuxCameraApiUnlockCaptureOrientationResponseClass* klass) { - 
G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_unlock_capture_orientation_response_dispose; -} - -static CameraLinuxCameraApiUnlockCaptureOrientationResponse* camera_linux_camera_api_unlock_capture_orientation_response_new() { - CameraLinuxCameraApiUnlockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_unlock_capture_orientation_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiUnlockCaptureOrientationResponse* camera_linux_camera_api_unlock_capture_orientation_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiUnlockCaptureOrientationResponse* self = CAMERA_LINUX_CAMERA_API_UNLOCK_CAPTURE_ORIENTATION_RESPONSE(g_object_new(camera_linux_camera_api_unlock_capture_orientation_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiTakePictureResponse, camera_linux_camera_api_take_picture_response, CAMERA_LINUX, CAMERA_API_TAKE_PICTURE_RESPONSE, GObject) struct _CameraLinuxCameraApiTakePictureResponse { @@ -849,10 +640,10 @@ static void camera_linux_camera_api_take_picture_response_class_init(CameraLinux G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_take_picture_response_dispose; } -static CameraLinuxCameraApiTakePictureResponse* camera_linux_camera_api_take_picture_response_new(const gchar* return_value) { +static CameraLinuxCameraApiTakePictureResponse* camera_linux_camera_api_take_picture_response_new() { CameraLinuxCameraApiTakePictureResponse* self = CAMERA_LINUX_CAMERA_API_TAKE_PICTURE_RESPONSE(g_object_new(camera_linux_camera_api_take_picture_response_get_type(), nullptr)); self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(return_value)); + fl_value_append_take(self->value, fl_value_new_null()); return self; } @@ -865,45 +656,6 @@ static CameraLinuxCameraApiTakePictureResponse* camera_linux_camera_api_take_pic return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiPrepareForVideoRecordingResponse, camera_linux_camera_api_prepare_for_video_recording_response, CAMERA_LINUX, CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE, GObject) - -struct _CameraLinuxCameraApiPrepareForVideoRecordingResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiPrepareForVideoRecordingResponse, camera_linux_camera_api_prepare_for_video_recording_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_prepare_for_video_recording_response_dispose(GObject* object) { - CameraLinuxCameraApiPrepareForVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - 
G_OBJECT_CLASS(camera_linux_camera_api_prepare_for_video_recording_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_prepare_for_video_recording_response_init(CameraLinuxCameraApiPrepareForVideoRecordingResponse* self) { -} - -static void camera_linux_camera_api_prepare_for_video_recording_response_class_init(CameraLinuxCameraApiPrepareForVideoRecordingResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_prepare_for_video_recording_response_dispose; -} - -static CameraLinuxCameraApiPrepareForVideoRecordingResponse* camera_linux_camera_api_prepare_for_video_recording_response_new() { - CameraLinuxCameraApiPrepareForVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_prepare_for_video_recording_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiPrepareForVideoRecordingResponse* camera_linux_camera_api_prepare_for_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiPrepareForVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PREPARE_FOR_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_prepare_for_video_recording_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiStartVideoRecordingResponse, camera_linux_camera_api_start_video_recording_response, CAMERA_LINUX, CAMERA_API_START_VIDEO_RECORDING_RESPONSE, GObject) struct _CameraLinuxCameraApiStartVideoRecordingResponse { @@ -982,38 +734,38 @@ static CameraLinuxCameraApiStopVideoRecordingResponse* camera_linux_camera_api_s return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiPauseVideoRecordingResponse, camera_linux_camera_api_pause_video_recording_response, CAMERA_LINUX, CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE, GObject) +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_MODE_RESPONSE, GObject) -struct _CameraLinuxCameraApiPauseVideoRecordingResponse { +struct _CameraLinuxCameraApiSetExposureModeResponse { GObject parent_instance; FlValue* value; }; -G_DEFINE_TYPE(CameraLinuxCameraApiPauseVideoRecordingResponse, camera_linux_camera_api_pause_video_recording_response, G_TYPE_OBJECT) +G_DEFINE_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, G_TYPE_OBJECT) -static void camera_linux_camera_api_pause_video_recording_response_dispose(GObject* object) { - CameraLinuxCameraApiPauseVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE(object); +static void camera_linux_camera_api_set_exposure_mode_response_dispose(GObject* object) { + CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(object); g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_pause_video_recording_response_parent_class)->dispose(object); + G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_mode_response_parent_class)->dispose(object); } -static void 
camera_linux_camera_api_pause_video_recording_response_init(CameraLinuxCameraApiPauseVideoRecordingResponse* self) { +static void camera_linux_camera_api_set_exposure_mode_response_init(CameraLinuxCameraApiSetExposureModeResponse* self) { } -static void camera_linux_camera_api_pause_video_recording_response_class_init(CameraLinuxCameraApiPauseVideoRecordingResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_pause_video_recording_response_dispose; +static void camera_linux_camera_api_set_exposure_mode_response_class_init(CameraLinuxCameraApiSetExposureModeResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_mode_response_dispose; } -static CameraLinuxCameraApiPauseVideoRecordingResponse* camera_linux_camera_api_pause_video_recording_response_new() { - CameraLinuxCameraApiPauseVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_pause_video_recording_response_get_type(), nullptr)); +static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new() { + CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_new_null()); return self; } -static CameraLinuxCameraApiPauseVideoRecordingResponse* camera_linux_camera_api_pause_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiPauseVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_pause_video_recording_response_get_type(), nullptr)); +static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new_error(const gchar* code, const gchar* message, FlValue* 
details) { + CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_new_string(code)); fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); @@ -1021,38 +773,38 @@ static CameraLinuxCameraApiPauseVideoRecordingResponse* camera_linux_camera_api_ return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResumeVideoRecordingResponse, camera_linux_camera_api_resume_video_recording_response, CAMERA_LINUX, CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE, GObject) +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, CAMERA_LINUX, CAMERA_API_SET_FOCUS_MODE_RESPONSE, GObject) -struct _CameraLinuxCameraApiResumeVideoRecordingResponse { +struct _CameraLinuxCameraApiSetFocusModeResponse { GObject parent_instance; FlValue* value; }; -G_DEFINE_TYPE(CameraLinuxCameraApiResumeVideoRecordingResponse, camera_linux_camera_api_resume_video_recording_response, G_TYPE_OBJECT) +G_DEFINE_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, G_TYPE_OBJECT) -static void camera_linux_camera_api_resume_video_recording_response_dispose(GObject* object) { - CameraLinuxCameraApiResumeVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE(object); +static void camera_linux_camera_api_set_focus_mode_response_dispose(GObject* object) { + CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(object); g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_resume_video_recording_response_parent_class)->dispose(object); + G_OBJECT_CLASS(camera_linux_camera_api_set_focus_mode_response_parent_class)->dispose(object); } -static void 
camera_linux_camera_api_resume_video_recording_response_init(CameraLinuxCameraApiResumeVideoRecordingResponse* self) { +static void camera_linux_camera_api_set_focus_mode_response_init(CameraLinuxCameraApiSetFocusModeResponse* self) { } -static void camera_linux_camera_api_resume_video_recording_response_class_init(CameraLinuxCameraApiResumeVideoRecordingResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_resume_video_recording_response_dispose; +static void camera_linux_camera_api_set_focus_mode_response_class_init(CameraLinuxCameraApiSetFocusModeResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_focus_mode_response_dispose; } -static CameraLinuxCameraApiResumeVideoRecordingResponse* camera_linux_camera_api_resume_video_recording_response_new() { - CameraLinuxCameraApiResumeVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_resume_video_recording_response_get_type(), nullptr)); +static CameraLinuxCameraApiSetFocusModeResponse* camera_linux_camera_api_set_focus_mode_response_new() { + CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_new_null()); return self; } -static CameraLinuxCameraApiResumeVideoRecordingResponse* camera_linux_camera_api_resume_video_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiResumeVideoRecordingResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_VIDEO_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_resume_video_recording_response_get_type(), nullptr)); +static CameraLinuxCameraApiSetFocusModeResponse* camera_linux_camera_api_set_focus_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + 
CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_new_string(code)); fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); @@ -1060,38 +812,38 @@ static CameraLinuxCameraApiResumeVideoRecordingResponse* camera_linux_camera_api return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFlashModeResponse, camera_linux_camera_api_set_flash_mode_response, CAMERA_LINUX, CAMERA_API_SET_FLASH_MODE_RESPONSE, GObject) +G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, CAMERA_LINUX, CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE, GObject) -struct _CameraLinuxCameraApiSetFlashModeResponse { +struct _CameraLinuxCameraApiSetImageFormatGroupResponse { GObject parent_instance; FlValue* value; }; -G_DEFINE_TYPE(CameraLinuxCameraApiSetFlashModeResponse, camera_linux_camera_api_set_flash_mode_response, G_TYPE_OBJECT) +G_DEFINE_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, G_TYPE_OBJECT) -static void camera_linux_camera_api_set_flash_mode_response_dispose(GObject* object) { - CameraLinuxCameraApiSetFlashModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FLASH_MODE_RESPONSE(object); +static void camera_linux_camera_api_set_image_format_group_response_dispose(GObject* object) { + CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(object); g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_flash_mode_response_parent_class)->dispose(object); + G_OBJECT_CLASS(camera_linux_camera_api_set_image_format_group_response_parent_class)->dispose(object); } -static void 
camera_linux_camera_api_set_flash_mode_response_init(CameraLinuxCameraApiSetFlashModeResponse* self) { +static void camera_linux_camera_api_set_image_format_group_response_init(CameraLinuxCameraApiSetImageFormatGroupResponse* self) { } -static void camera_linux_camera_api_set_flash_mode_response_class_init(CameraLinuxCameraApiSetFlashModeResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_flash_mode_response_dispose; +static void camera_linux_camera_api_set_image_format_group_response_class_init(CameraLinuxCameraApiSetImageFormatGroupResponseClass* klass) { + G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_image_format_group_response_dispose; } -static CameraLinuxCameraApiSetFlashModeResponse* camera_linux_camera_api_set_flash_mode_response_new() { - CameraLinuxCameraApiSetFlashModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FLASH_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_flash_mode_response_get_type(), nullptr)); +static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new() { + CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_new_null()); return self; } -static CameraLinuxCameraApiSetFlashModeResponse* camera_linux_camera_api_set_flash_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetFlashModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FLASH_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_flash_mode_response_get_type(), nullptr)); +static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new_error(const gchar* code, const gchar* message, FlValue* details) { + 
CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr)); self->value = fl_value_new_list(); fl_value_append_take(self->value, fl_value_new_string(code)); fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); @@ -1099,639 +851,15 @@ static CameraLinuxCameraApiSetFlashModeResponse* camera_linux_camera_api_set_fla return self; } -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_MODE_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetExposureModeResponse { +struct _CameraLinuxCameraApi { GObject parent_instance; - FlValue* value; + const CameraLinuxCameraApiVTable* vtable; + gpointer user_data; + GDestroyNotify user_data_free_func; }; -G_DEFINE_TYPE(CameraLinuxCameraApiSetExposureModeResponse, camera_linux_camera_api_set_exposure_mode_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_exposure_mode_response_dispose(GObject* object) { - CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_mode_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_exposure_mode_response_init(CameraLinuxCameraApiSetExposureModeResponse* self) { -} - -static void camera_linux_camera_api_set_exposure_mode_response_class_init(CameraLinuxCameraApiSetExposureModeResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_mode_response_dispose; -} - -static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new() { - CameraLinuxCameraApiSetExposureModeResponse* self = 
CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiSetExposureModeResponse* camera_linux_camera_api_set_exposure_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetExposureModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_mode_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposurePointResponse, camera_linux_camera_api_set_exposure_point_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_POINT_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetExposurePointResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetExposurePointResponse, camera_linux_camera_api_set_exposure_point_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_exposure_point_response_dispose(GObject* object) { - CameraLinuxCameraApiSetExposurePointResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_POINT_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_point_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_exposure_point_response_init(CameraLinuxCameraApiSetExposurePointResponse* self) { -} - -static void camera_linux_camera_api_set_exposure_point_response_class_init(CameraLinuxCameraApiSetExposurePointResponseClass* klass) { - 
G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_point_response_dispose; -} - -static CameraLinuxCameraApiSetExposurePointResponse* camera_linux_camera_api_set_exposure_point_response_new() { - CameraLinuxCameraApiSetExposurePointResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_point_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiSetExposurePointResponse* camera_linux_camera_api_set_exposure_point_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetExposurePointResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_point_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetLensPositionResponse, camera_linux_camera_api_set_lens_position_response, CAMERA_LINUX, CAMERA_API_SET_LENS_POSITION_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetLensPositionResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetLensPositionResponse, camera_linux_camera_api_set_lens_position_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_lens_position_response_dispose(GObject* object) { - CameraLinuxCameraApiSetLensPositionResponse* self = CAMERA_LINUX_CAMERA_API_SET_LENS_POSITION_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_lens_position_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_lens_position_response_init(CameraLinuxCameraApiSetLensPositionResponse* self) { -} - -static void camera_linux_camera_api_set_lens_position_response_class_init(CameraLinuxCameraApiSetLensPositionResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_lens_position_response_dispose; -} - -static CameraLinuxCameraApiSetLensPositionResponse* camera_linux_camera_api_set_lens_position_response_new() { - CameraLinuxCameraApiSetLensPositionResponse* self = CAMERA_LINUX_CAMERA_API_SET_LENS_POSITION_RESPONSE(g_object_new(camera_linux_camera_api_set_lens_position_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiSetLensPositionResponse* camera_linux_camera_api_set_lens_position_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetLensPositionResponse* self = CAMERA_LINUX_CAMERA_API_SET_LENS_POSITION_RESPONSE(g_object_new(camera_linux_camera_api_set_lens_position_response_get_type(), nullptr)); - 
self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMinExposureOffsetResponse, camera_linux_camera_api_get_min_exposure_offset_response, CAMERA_LINUX, CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE, GObject) - -struct _CameraLinuxCameraApiGetMinExposureOffsetResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiGetMinExposureOffsetResponse, camera_linux_camera_api_get_min_exposure_offset_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_get_min_exposure_offset_response_dispose(GObject* object) { - CameraLinuxCameraApiGetMinExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_get_min_exposure_offset_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_get_min_exposure_offset_response_init(CameraLinuxCameraApiGetMinExposureOffsetResponse* self) { -} - -static void camera_linux_camera_api_get_min_exposure_offset_response_class_init(CameraLinuxCameraApiGetMinExposureOffsetResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_min_exposure_offset_response_dispose; -} - -static CameraLinuxCameraApiGetMinExposureOffsetResponse* camera_linux_camera_api_get_min_exposure_offset_response_new(double return_value) { - CameraLinuxCameraApiGetMinExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_min_exposure_offset_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_float(return_value)); - 
return self; -} - -static CameraLinuxCameraApiGetMinExposureOffsetResponse* camera_linux_camera_api_get_min_exposure_offset_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiGetMinExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_min_exposure_offset_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMaxExposureOffsetResponse, camera_linux_camera_api_get_max_exposure_offset_response, CAMERA_LINUX, CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE, GObject) - -struct _CameraLinuxCameraApiGetMaxExposureOffsetResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiGetMaxExposureOffsetResponse, camera_linux_camera_api_get_max_exposure_offset_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_get_max_exposure_offset_response_dispose(GObject* object) { - CameraLinuxCameraApiGetMaxExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_get_max_exposure_offset_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_get_max_exposure_offset_response_init(CameraLinuxCameraApiGetMaxExposureOffsetResponse* self) { -} - -static void camera_linux_camera_api_get_max_exposure_offset_response_class_init(CameraLinuxCameraApiGetMaxExposureOffsetResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_max_exposure_offset_response_dispose; -} - -static 
CameraLinuxCameraApiGetMaxExposureOffsetResponse* camera_linux_camera_api_get_max_exposure_offset_response_new(double return_value) { - CameraLinuxCameraApiGetMaxExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_max_exposure_offset_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_float(return_value)); - return self; -} - -static CameraLinuxCameraApiGetMaxExposureOffsetResponse* camera_linux_camera_api_get_max_exposure_offset_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiGetMaxExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_get_max_exposure_offset_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetExposureOffsetResponse, camera_linux_camera_api_set_exposure_offset_response, CAMERA_LINUX, CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetExposureOffsetResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetExposureOffsetResponse, camera_linux_camera_api_set_exposure_offset_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_exposure_offset_response_dispose(GObject* object) { - CameraLinuxCameraApiSetExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_exposure_offset_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_exposure_offset_response_init(CameraLinuxCameraApiSetExposureOffsetResponse* self) { -} - -static void camera_linux_camera_api_set_exposure_offset_response_class_init(CameraLinuxCameraApiSetExposureOffsetResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_exposure_offset_response_dispose; -} - -static CameraLinuxCameraApiSetExposureOffsetResponse* camera_linux_camera_api_set_exposure_offset_response_new() { - CameraLinuxCameraApiSetExposureOffsetResponse* self = CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_offset_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiSetExposureOffsetResponse* camera_linux_camera_api_set_exposure_offset_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetExposureOffsetResponse* self = 
CAMERA_LINUX_CAMERA_API_SET_EXPOSURE_OFFSET_RESPONSE(g_object_new(camera_linux_camera_api_set_exposure_offset_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, CAMERA_LINUX, CAMERA_API_SET_FOCUS_MODE_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetFocusModeResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetFocusModeResponse, camera_linux_camera_api_set_focus_mode_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_focus_mode_response_dispose(GObject* object) { - CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_focus_mode_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_focus_mode_response_init(CameraLinuxCameraApiSetFocusModeResponse* self) { -} - -static void camera_linux_camera_api_set_focus_mode_response_class_init(CameraLinuxCameraApiSetFocusModeResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_focus_mode_response_dispose; -} - -static CameraLinuxCameraApiSetFocusModeResponse* camera_linux_camera_api_set_focus_mode_response_new() { - CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static 
CameraLinuxCameraApiSetFocusModeResponse* camera_linux_camera_api_set_focus_mode_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetFocusModeResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_MODE_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_mode_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetFocusPointResponse, camera_linux_camera_api_set_focus_point_response, CAMERA_LINUX, CAMERA_API_SET_FOCUS_POINT_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetFocusPointResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetFocusPointResponse, camera_linux_camera_api_set_focus_point_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_focus_point_response_dispose(GObject* object) { - CameraLinuxCameraApiSetFocusPointResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_POINT_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_focus_point_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_focus_point_response_init(CameraLinuxCameraApiSetFocusPointResponse* self) { -} - -static void camera_linux_camera_api_set_focus_point_response_class_init(CameraLinuxCameraApiSetFocusPointResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_focus_point_response_dispose; -} - -static CameraLinuxCameraApiSetFocusPointResponse* camera_linux_camera_api_set_focus_point_response_new() { - CameraLinuxCameraApiSetFocusPointResponse* self = 
CAMERA_LINUX_CAMERA_API_SET_FOCUS_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_point_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiSetFocusPointResponse* camera_linux_camera_api_set_focus_point_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetFocusPointResponse* self = CAMERA_LINUX_CAMERA_API_SET_FOCUS_POINT_RESPONSE(g_object_new(camera_linux_camera_api_set_focus_point_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMinZoomLevelResponse, camera_linux_camera_api_get_min_zoom_level_response, CAMERA_LINUX, CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE, GObject) - -struct _CameraLinuxCameraApiGetMinZoomLevelResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiGetMinZoomLevelResponse, camera_linux_camera_api_get_min_zoom_level_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_get_min_zoom_level_response_dispose(GObject* object) { - CameraLinuxCameraApiGetMinZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_get_min_zoom_level_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_get_min_zoom_level_response_init(CameraLinuxCameraApiGetMinZoomLevelResponse* self) { -} - -static void camera_linux_camera_api_get_min_zoom_level_response_class_init(CameraLinuxCameraApiGetMinZoomLevelResponseClass* klass) { - 
G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_min_zoom_level_response_dispose; -} - -static CameraLinuxCameraApiGetMinZoomLevelResponse* camera_linux_camera_api_get_min_zoom_level_response_new(double return_value) { - CameraLinuxCameraApiGetMinZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_min_zoom_level_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_float(return_value)); - return self; -} - -static CameraLinuxCameraApiGetMinZoomLevelResponse* camera_linux_camera_api_get_min_zoom_level_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiGetMinZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MIN_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_min_zoom_level_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiGetMaxZoomLevelResponse, camera_linux_camera_api_get_max_zoom_level_response, CAMERA_LINUX, CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE, GObject) - -struct _CameraLinuxCameraApiGetMaxZoomLevelResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiGetMaxZoomLevelResponse, camera_linux_camera_api_get_max_zoom_level_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_get_max_zoom_level_response_dispose(GObject* object) { - CameraLinuxCameraApiGetMaxZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_get_max_zoom_level_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_get_max_zoom_level_response_init(CameraLinuxCameraApiGetMaxZoomLevelResponse* self) { -} - -static void camera_linux_camera_api_get_max_zoom_level_response_class_init(CameraLinuxCameraApiGetMaxZoomLevelResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_get_max_zoom_level_response_dispose; -} - -static CameraLinuxCameraApiGetMaxZoomLevelResponse* camera_linux_camera_api_get_max_zoom_level_response_new(double return_value) { - CameraLinuxCameraApiGetMaxZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_max_zoom_level_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_float(return_value)); - return self; -} - -static CameraLinuxCameraApiGetMaxZoomLevelResponse* camera_linux_camera_api_get_max_zoom_level_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiGetMaxZoomLevelResponse* self = 
CAMERA_LINUX_CAMERA_API_GET_MAX_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_get_max_zoom_level_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetZoomLevelResponse, camera_linux_camera_api_set_zoom_level_response, CAMERA_LINUX, CAMERA_API_SET_ZOOM_LEVEL_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetZoomLevelResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetZoomLevelResponse, camera_linux_camera_api_set_zoom_level_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_zoom_level_response_dispose(GObject* object) { - CameraLinuxCameraApiSetZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_SET_ZOOM_LEVEL_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_zoom_level_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_zoom_level_response_init(CameraLinuxCameraApiSetZoomLevelResponse* self) { -} - -static void camera_linux_camera_api_set_zoom_level_response_class_init(CameraLinuxCameraApiSetZoomLevelResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_zoom_level_response_dispose; -} - -static CameraLinuxCameraApiSetZoomLevelResponse* camera_linux_camera_api_set_zoom_level_response_new() { - CameraLinuxCameraApiSetZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_SET_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_set_zoom_level_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static 
CameraLinuxCameraApiSetZoomLevelResponse* camera_linux_camera_api_set_zoom_level_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetZoomLevelResponse* self = CAMERA_LINUX_CAMERA_API_SET_ZOOM_LEVEL_RESPONSE(g_object_new(camera_linux_camera_api_set_zoom_level_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiPausePreviewResponse, camera_linux_camera_api_pause_preview_response, CAMERA_LINUX, CAMERA_API_PAUSE_PREVIEW_RESPONSE, GObject) - -struct _CameraLinuxCameraApiPausePreviewResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiPausePreviewResponse, camera_linux_camera_api_pause_preview_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_pause_preview_response_dispose(GObject* object) { - CameraLinuxCameraApiPausePreviewResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_PREVIEW_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_pause_preview_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_pause_preview_response_init(CameraLinuxCameraApiPausePreviewResponse* self) { -} - -static void camera_linux_camera_api_pause_preview_response_class_init(CameraLinuxCameraApiPausePreviewResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_pause_preview_response_dispose; -} - -static CameraLinuxCameraApiPausePreviewResponse* camera_linux_camera_api_pause_preview_response_new() { - CameraLinuxCameraApiPausePreviewResponse* self = 
CAMERA_LINUX_CAMERA_API_PAUSE_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_pause_preview_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiPausePreviewResponse* camera_linux_camera_api_pause_preview_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiPausePreviewResponse* self = CAMERA_LINUX_CAMERA_API_PAUSE_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_pause_preview_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiResumePreviewResponse, camera_linux_camera_api_resume_preview_response, CAMERA_LINUX, CAMERA_API_RESUME_PREVIEW_RESPONSE, GObject) - -struct _CameraLinuxCameraApiResumePreviewResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiResumePreviewResponse, camera_linux_camera_api_resume_preview_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_resume_preview_response_dispose(GObject* object) { - CameraLinuxCameraApiResumePreviewResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_PREVIEW_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_resume_preview_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_resume_preview_response_init(CameraLinuxCameraApiResumePreviewResponse* self) { -} - -static void camera_linux_camera_api_resume_preview_response_class_init(CameraLinuxCameraApiResumePreviewResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = 
camera_linux_camera_api_resume_preview_response_dispose; -} - -static CameraLinuxCameraApiResumePreviewResponse* camera_linux_camera_api_resume_preview_response_new() { - CameraLinuxCameraApiResumePreviewResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_resume_preview_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiResumePreviewResponse* camera_linux_camera_api_resume_preview_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiResumePreviewResponse* self = CAMERA_LINUX_CAMERA_API_RESUME_PREVIEW_RESPONSE(g_object_new(camera_linux_camera_api_resume_preview_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse, camera_linux_camera_api_update_description_while_recording_response, CAMERA_LINUX, CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE, GObject) - -struct _CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse, camera_linux_camera_api_update_description_while_recording_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_update_description_while_recording_response_dispose(GObject* object) { - CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self = CAMERA_LINUX_CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_update_description_while_recording_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_update_description_while_recording_response_init(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self) { -} - -static void camera_linux_camera_api_update_description_while_recording_response_class_init(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_update_description_while_recording_response_dispose; -} - -static CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* camera_linux_camera_api_update_description_while_recording_response_new() { - CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self = CAMERA_LINUX_CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_update_description_while_recording_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static 
CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* camera_linux_camera_api_update_description_while_recording_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse* self = CAMERA_LINUX_CAMERA_API_UPDATE_DESCRIPTION_WHILE_RECORDING_RESPONSE(g_object_new(camera_linux_camera_api_update_description_while_recording_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetImageFileFormatResponse, camera_linux_camera_api_set_image_file_format_response, CAMERA_LINUX, CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetImageFileFormatResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetImageFileFormatResponse, camera_linux_camera_api_set_image_file_format_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_image_file_format_response_dispose(GObject* object) { - CameraLinuxCameraApiSetImageFileFormatResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_image_file_format_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_image_file_format_response_init(CameraLinuxCameraApiSetImageFileFormatResponse* self) { -} - -static void camera_linux_camera_api_set_image_file_format_response_class_init(CameraLinuxCameraApiSetImageFileFormatResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_image_file_format_response_dispose; -} - -static 
CameraLinuxCameraApiSetImageFileFormatResponse* camera_linux_camera_api_set_image_file_format_response_new() { - CameraLinuxCameraApiSetImageFileFormatResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE(g_object_new(camera_linux_camera_api_set_image_file_format_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiSetImageFileFormatResponse* camera_linux_camera_api_set_image_file_format_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetImageFileFormatResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FILE_FORMAT_RESPONSE(g_object_new(camera_linux_camera_api_set_image_file_format_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? 
fl_value_ref(details) : fl_value_new_null()); - return self; -} - -G_DECLARE_FINAL_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, CAMERA_LINUX, CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE, GObject) - -struct _CameraLinuxCameraApiSetImageFormatGroupResponse { - GObject parent_instance; - - FlValue* value; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApiSetImageFormatGroupResponse, camera_linux_camera_api_set_image_format_group_response, G_TYPE_OBJECT) - -static void camera_linux_camera_api_set_image_format_group_response_dispose(GObject* object) { - CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(object); - g_clear_pointer(&self->value, fl_value_unref); - G_OBJECT_CLASS(camera_linux_camera_api_set_image_format_group_response_parent_class)->dispose(object); -} - -static void camera_linux_camera_api_set_image_format_group_response_init(CameraLinuxCameraApiSetImageFormatGroupResponse* self) { -} - -static void camera_linux_camera_api_set_image_format_group_response_class_init(CameraLinuxCameraApiSetImageFormatGroupResponseClass* klass) { - G_OBJECT_CLASS(klass)->dispose = camera_linux_camera_api_set_image_format_group_response_dispose; -} - -static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new() { - CameraLinuxCameraApiSetImageFormatGroupResponse* self = CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_null()); - return self; -} - -static CameraLinuxCameraApiSetImageFormatGroupResponse* camera_linux_camera_api_set_image_format_group_response_new_error(const gchar* code, const gchar* message, FlValue* details) { - CameraLinuxCameraApiSetImageFormatGroupResponse* self = 
CAMERA_LINUX_CAMERA_API_SET_IMAGE_FORMAT_GROUP_RESPONSE(g_object_new(camera_linux_camera_api_set_image_format_group_response_get_type(), nullptr)); - self->value = fl_value_new_list(); - fl_value_append_take(self->value, fl_value_new_string(code)); - fl_value_append_take(self->value, fl_value_new_string(message != nullptr ? message : "")); - fl_value_append_take(self->value, details != nullptr ? fl_value_ref(details) : fl_value_new_null()); - return self; -} - -struct _CameraLinuxCameraApi { - GObject parent_instance; - - const CameraLinuxCameraApiVTable* vtable; - gpointer user_data; - GDestroyNotify user_data_free_func; -}; - -G_DEFINE_TYPE(CameraLinuxCameraApi, camera_linux_camera_api, G_TYPE_OBJECT) +G_DEFINE_TYPE(CameraLinuxCameraApi, camera_linux_camera_api, G_TYPE_OBJECT) static void camera_linux_camera_api_dispose(GObject* object) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(object); @@ -1791,358 +919,110 @@ static void camera_linux_camera_api_initialize_cb(FlBasicMessageChannel* channel } FlValue* value0 = fl_value_get_list_value(message_, 0); - int64_t camera_id = fl_value_get_int(value0); - FlValue* value1 = fl_value_get_list_value(message_, 1); - CameraLinuxPlatformImageFormatGroup image_format = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->initialize(camera_id, image_format, handle, self->user_data); -} - -static void camera_linux_camera_api_start_image_stream_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->start_image_stream == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = 
camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->start_image_stream(handle, self->user_data); -} - -static void camera_linux_camera_api_stop_image_stream_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->stop_image_stream == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->stop_image_stream(handle, self->user_data); -} - -static void camera_linux_camera_api_get_texture_id_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->get_texture_id == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - int64_t camera_id = fl_value_get_int(value0); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->get_texture_id(camera_id, handle, self->user_data); -} - -static void camera_linux_camera_api_received_image_stream_data_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->received_image_stream_data == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->received_image_stream_data(handle, self->user_data); -} - -static void camera_linux_camera_api_dispose_cb(FlBasicMessageChannel* channel, FlValue* message_, 
FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->dispose == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - int64_t camera_id = fl_value_get_int(value0); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->dispose(camera_id, handle, self->user_data); -} - -static void camera_linux_camera_api_lock_capture_orientation_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->lock_capture_orientation == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - CameraLinuxPlatformDeviceOrientation orientation = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->lock_capture_orientation(orientation, handle, self->user_data); -} - -static void camera_linux_camera_api_unlock_capture_orientation_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->unlock_capture_orientation == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->unlock_capture_orientation(handle, self->user_data); -} - -static void camera_linux_camera_api_take_picture_cb(FlBasicMessageChannel* channel, FlValue* message_, 
FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->take_picture == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->take_picture(handle, self->user_data); -} - -static void camera_linux_camera_api_prepare_for_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->prepare_for_video_recording == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->prepare_for_video_recording(handle, self->user_data); -} - -static void camera_linux_camera_api_start_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->start_video_recording == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - gboolean enable_stream = fl_value_get_bool(value0); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->start_video_recording(enable_stream, handle, self->user_data); -} - -static void camera_linux_camera_api_stop_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || 
self->vtable->stop_video_recording == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->stop_video_recording(handle, self->user_data); -} - -static void camera_linux_camera_api_pause_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->pause_video_recording == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->pause_video_recording(handle, self->user_data); -} - -static void camera_linux_camera_api_resume_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->resume_video_recording == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->resume_video_recording(handle, self->user_data); -} - -static void camera_linux_camera_api_set_flash_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->set_flash_mode == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - CameraLinuxPlatformFlashMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = 
camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_flash_mode(mode, handle, self->user_data); -} - -static void camera_linux_camera_api_set_exposure_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->set_exposure_mode == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - CameraLinuxPlatformExposureMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_exposure_mode(mode, handle, self->user_data); -} - -static void camera_linux_camera_api_set_exposure_point_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->set_exposure_point == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - CameraLinuxPlatformPoint* point = CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value0)); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_exposure_point(point, handle, self->user_data); -} - -static void camera_linux_camera_api_set_lens_position_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->set_lens_position == nullptr) { - return; - } - - FlValue* value0 = 
fl_value_get_list_value(message_, 0); - double position = fl_value_get_float(value0); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_lens_position(position, handle, self->user_data); -} - -static void camera_linux_camera_api_get_min_exposure_offset_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->get_min_exposure_offset == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->get_min_exposure_offset(handle, self->user_data); -} - -static void camera_linux_camera_api_get_max_exposure_offset_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->get_max_exposure_offset == nullptr) { - return; - } - - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->get_max_exposure_offset(handle, self->user_data); -} - -static void camera_linux_camera_api_set_exposure_offset_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->set_exposure_offset == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - double offset = fl_value_get_float(value0); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - 
self->vtable->set_exposure_offset(offset, handle, self->user_data); -} - -static void camera_linux_camera_api_set_focus_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->set_focus_mode == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - CameraLinuxPlatformFocusMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); - g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_focus_mode(mode, handle, self->user_data); -} - -static void camera_linux_camera_api_set_focus_point_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { - CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - - if (self->vtable == nullptr || self->vtable->set_focus_point == nullptr) { - return; - } - - FlValue* value0 = fl_value_get_list_value(message_, 0); - CameraLinuxPlatformPoint* point = CAMERA_LINUX_PLATFORM_POINT(fl_value_get_custom_value_object(value0)); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformImageFormatGroup image_format = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_focus_point(point, handle, self->user_data); + self->vtable->initialize(camera_id, image_format, handle, self->user_data); } -static void camera_linux_camera_api_get_min_zoom_level_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* 
response_handle, gpointer user_data) { +static void camera_linux_camera_api_get_texture_id_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->get_min_zoom_level == nullptr) { + if (self->vtable == nullptr || self->vtable->get_texture_id == nullptr) { return; } + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->get_min_zoom_level(handle, self->user_data); + self->vtable->get_texture_id(camera_id, handle, self->user_data); } -static void camera_linux_camera_api_get_max_zoom_level_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { +static void camera_linux_camera_api_dispose_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->get_max_zoom_level == nullptr) { + if (self->vtable == nullptr || self->vtable->dispose == nullptr) { return; } + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->get_max_zoom_level(handle, self->user_data); + self->vtable->dispose(camera_id, handle, self->user_data); } -static void camera_linux_camera_api_set_zoom_level_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { +static void 
camera_linux_camera_api_take_picture_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->set_zoom_level == nullptr) { + if (self->vtable == nullptr || self->vtable->take_picture == nullptr) { return; } FlValue* value0 = fl_value_get_list_value(message_, 0); - double zoom = fl_value_get_float(value0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + const gchar* path = fl_value_get_string(value1); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_zoom_level(zoom, handle, self->user_data); + self->vtable->take_picture(camera_id, path, handle, self->user_data); } -static void camera_linux_camera_api_pause_preview_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { +static void camera_linux_camera_api_start_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->pause_preview == nullptr) { + if (self->vtable == nullptr || self->vtable->start_video_recording == nullptr) { return; } + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + gboolean enable_stream = fl_value_get_bool(value1); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->pause_preview(handle, self->user_data); + self->vtable->start_video_recording(camera_id, enable_stream, handle, 
self->user_data); } -static void camera_linux_camera_api_resume_preview_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { +static void camera_linux_camera_api_stop_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->resume_preview == nullptr) { + if (self->vtable == nullptr || self->vtable->stop_video_recording == nullptr) { return; } + FlValue* value0 = fl_value_get_list_value(message_, 0); + int64_t camera_id = fl_value_get_int(value0); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->resume_preview(handle, self->user_data); + self->vtable->stop_video_recording(camera_id, handle, self->user_data); } -static void camera_linux_camera_api_update_description_while_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { +static void camera_linux_camera_api_set_exposure_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->update_description_while_recording == nullptr) { + if (self->vtable == nullptr || self->vtable->set_exposure_mode == nullptr) { return; } FlValue* value0 = fl_value_get_list_value(message_, 0); - const gchar* camera_name = fl_value_get_string(value0); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformExposureMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); 
g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->update_description_while_recording(camera_name, handle, self->user_data); + self->vtable->set_exposure_mode(camera_id, mode, handle, self->user_data); } -static void camera_linux_camera_api_set_image_file_format_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { +static void camera_linux_camera_api_set_focus_mode_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { CameraLinuxCameraApi* self = CAMERA_LINUX_CAMERA_API(user_data); - if (self->vtable == nullptr || self->vtable->set_image_file_format == nullptr) { + if (self->vtable == nullptr || self->vtable->set_focus_mode == nullptr) { return; } FlValue* value0 = fl_value_get_list_value(message_, 0); - CameraLinuxPlatformImageFileFormat format = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value0))))); + int64_t camera_id = fl_value_get_int(value0); + FlValue* value1 = fl_value_get_list_value(message_, 1); + CameraLinuxPlatformFocusMode mode = static_cast(fl_value_get_int(reinterpret_cast(const_cast(fl_value_get_custom_value(value1))))); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->set_image_file_format(format, handle, self->user_data); + self->vtable->set_focus_mode(camera_id, mode, handle, self->user_data); } static void camera_linux_camera_api_set_image_format_group_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { @@ -2174,93 +1054,27 @@ void camera_linux_camera_api_set_method_handlers(FlBinaryMessenger* messenger, c g_autofree gchar* initialize_channel_name = 
g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.initialize%s", dot_suffix); g_autoptr(FlBasicMessageChannel) initialize_channel = fl_basic_message_channel_new(messenger, initialize_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(initialize_channel, camera_linux_camera_api_initialize_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* start_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startImageStream%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) start_image_stream_channel = fl_basic_message_channel_new(messenger, start_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(start_image_stream_channel, camera_linux_camera_api_start_image_stream_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* stop_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) stop_image_stream_channel = fl_basic_message_channel_new(messenger, stop_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(stop_image_stream_channel, camera_linux_camera_api_stop_image_stream_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* get_texture_id_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getTextureId%s", dot_suffix); g_autoptr(FlBasicMessageChannel) get_texture_id_channel = fl_basic_message_channel_new(messenger, get_texture_id_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(get_texture_id_channel, camera_linux_camera_api_get_texture_id_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* received_image_stream_data_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) received_image_stream_data_channel = 
fl_basic_message_channel_new(messenger, received_image_stream_data_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(received_image_stream_data_channel, camera_linux_camera_api_received_image_stream_data_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* dispose_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.dispose%s", dot_suffix); g_autoptr(FlBasicMessageChannel) dispose_channel = fl_basic_message_channel_new(messenger, dispose_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(dispose_channel, camera_linux_camera_api_dispose_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* lock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.lockCaptureOrientation%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) lock_capture_orientation_channel = fl_basic_message_channel_new(messenger, lock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(lock_capture_orientation_channel, camera_linux_camera_api_lock_capture_orientation_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* unlock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.unlockCaptureOrientation%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) unlock_capture_orientation_channel = fl_basic_message_channel_new(messenger, unlock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(unlock_capture_orientation_channel, camera_linux_camera_api_unlock_capture_orientation_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* take_picture_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.takePicture%s", dot_suffix); g_autoptr(FlBasicMessageChannel) take_picture_channel = fl_basic_message_channel_new(messenger, take_picture_channel_name, 
FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(take_picture_channel, camera_linux_camera_api_take_picture_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* prepare_for_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.prepareForVideoRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) prepare_for_video_recording_channel = fl_basic_message_channel_new(messenger, prepare_for_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(prepare_for_video_recording_channel, camera_linux_camera_api_prepare_for_video_recording_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* start_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording%s", dot_suffix); g_autoptr(FlBasicMessageChannel) start_video_recording_channel = fl_basic_message_channel_new(messenger, start_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(start_video_recording_channel, camera_linux_camera_api_start_video_recording_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* stop_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording%s", dot_suffix); g_autoptr(FlBasicMessageChannel) stop_video_recording_channel = fl_basic_message_channel_new(messenger, stop_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(stop_video_recording_channel, camera_linux_camera_api_stop_video_recording_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* pause_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pauseVideoRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) pause_video_recording_channel = fl_basic_message_channel_new(messenger, pause_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); - 
fl_basic_message_channel_set_message_handler(pause_video_recording_channel, camera_linux_camera_api_pause_video_recording_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* resume_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumeVideoRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) resume_video_recording_channel = fl_basic_message_channel_new(messenger, resume_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(resume_video_recording_channel, camera_linux_camera_api_resume_video_recording_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* set_flash_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFlashMode%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_flash_mode_channel = fl_basic_message_channel_new(messenger, set_flash_mode_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_flash_mode_channel, camera_linux_camera_api_set_flash_mode_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* set_exposure_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_exposure_mode_channel = fl_basic_message_channel_new(messenger, set_exposure_mode_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_exposure_mode_channel, camera_linux_camera_api_set_exposure_mode_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* set_exposure_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposurePoint%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_exposure_point_channel = fl_basic_message_channel_new(messenger, set_exposure_point_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_exposure_point_channel, 
camera_linux_camera_api_set_exposure_point_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* set_lens_position_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setLensPosition%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_lens_position_channel = fl_basic_message_channel_new(messenger, set_lens_position_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_lens_position_channel, camera_linux_camera_api_set_lens_position_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* get_min_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinExposureOffset%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_min_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_min_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_min_exposure_offset_channel, camera_linux_camera_api_get_min_exposure_offset_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* get_max_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxExposureOffset%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_max_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_max_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_max_exposure_offset_channel, camera_linux_camera_api_get_max_exposure_offset_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* set_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureOffset%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_exposure_offset_channel = fl_basic_message_channel_new(messenger, set_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_exposure_offset_channel, camera_linux_camera_api_set_exposure_offset_cb, 
g_object_ref(api_data), g_object_unref); g_autofree gchar* set_focus_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_focus_mode_channel = fl_basic_message_channel_new(messenger, set_focus_mode_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_focus_mode_channel, camera_linux_camera_api_set_focus_mode_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* set_focus_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusPoint%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_focus_point_channel = fl_basic_message_channel_new(messenger, set_focus_point_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_focus_point_channel, camera_linux_camera_api_set_focus_point_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* get_min_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinZoomLevel%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_min_zoom_level_channel = fl_basic_message_channel_new(messenger, get_min_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_min_zoom_level_channel, camera_linux_camera_api_get_min_zoom_level_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* get_max_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxZoomLevel%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_max_zoom_level_channel = fl_basic_message_channel_new(messenger, get_max_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_max_zoom_level_channel, camera_linux_camera_api_get_max_zoom_level_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* set_zoom_level_channel_name = 
g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setZoomLevel%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_zoom_level_channel = fl_basic_message_channel_new(messenger, set_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_zoom_level_channel, camera_linux_camera_api_set_zoom_level_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* pause_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pausePreview%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) pause_preview_channel = fl_basic_message_channel_new(messenger, pause_preview_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(pause_preview_channel, camera_linux_camera_api_pause_preview_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* resume_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumePreview%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) resume_preview_channel = fl_basic_message_channel_new(messenger, resume_preview_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(resume_preview_channel, camera_linux_camera_api_resume_preview_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* update_description_while_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.updateDescriptionWhileRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) update_description_while_recording_channel = fl_basic_message_channel_new(messenger, update_description_while_recording_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(update_description_while_recording_channel, camera_linux_camera_api_update_description_while_recording_cb, g_object_ref(api_data), g_object_unref); - g_autofree gchar* set_image_file_format_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat%s", 
dot_suffix); - g_autoptr(FlBasicMessageChannel) set_image_file_format_channel = fl_basic_message_channel_new(messenger, set_image_file_format_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_image_file_format_channel, camera_linux_camera_api_set_image_file_format_cb, g_object_ref(api_data), g_object_unref); g_autofree gchar* set_image_format_group_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_image_format_group_channel = fl_basic_message_channel_new(messenger, set_image_format_group_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_image_format_group_channel, camera_linux_camera_api_set_image_format_group_cb, g_object_ref(api_data), g_object_unref); @@ -2279,93 +1093,27 @@ void camera_linux_camera_api_clear_method_handlers(FlBinaryMessenger* messenger, g_autofree gchar* initialize_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.initialize%s", dot_suffix); g_autoptr(FlBasicMessageChannel) initialize_channel = fl_basic_message_channel_new(messenger, initialize_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(initialize_channel, nullptr, nullptr, nullptr); - g_autofree gchar* start_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startImageStream%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) start_image_stream_channel = fl_basic_message_channel_new(messenger, start_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(start_image_stream_channel, nullptr, nullptr, nullptr); - g_autofree gchar* stop_image_stream_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopImageStream%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) stop_image_stream_channel = fl_basic_message_channel_new(messenger, 
stop_image_stream_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(stop_image_stream_channel, nullptr, nullptr, nullptr); g_autofree gchar* get_texture_id_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getTextureId%s", dot_suffix); g_autoptr(FlBasicMessageChannel) get_texture_id_channel = fl_basic_message_channel_new(messenger, get_texture_id_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(get_texture_id_channel, nullptr, nullptr, nullptr); - g_autofree gchar* received_image_stream_data_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.receivedImageStreamData%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) received_image_stream_data_channel = fl_basic_message_channel_new(messenger, received_image_stream_data_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(received_image_stream_data_channel, nullptr, nullptr, nullptr); g_autofree gchar* dispose_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.dispose%s", dot_suffix); g_autoptr(FlBasicMessageChannel) dispose_channel = fl_basic_message_channel_new(messenger, dispose_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(dispose_channel, nullptr, nullptr, nullptr); - g_autofree gchar* lock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.lockCaptureOrientation%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) lock_capture_orientation_channel = fl_basic_message_channel_new(messenger, lock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(lock_capture_orientation_channel, nullptr, nullptr, nullptr); - g_autofree gchar* unlock_capture_orientation_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.unlockCaptureOrientation%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) 
unlock_capture_orientation_channel = fl_basic_message_channel_new(messenger, unlock_capture_orientation_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(unlock_capture_orientation_channel, nullptr, nullptr, nullptr); g_autofree gchar* take_picture_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.takePicture%s", dot_suffix); g_autoptr(FlBasicMessageChannel) take_picture_channel = fl_basic_message_channel_new(messenger, take_picture_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(take_picture_channel, nullptr, nullptr, nullptr); - g_autofree gchar* prepare_for_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.prepareForVideoRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) prepare_for_video_recording_channel = fl_basic_message_channel_new(messenger, prepare_for_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(prepare_for_video_recording_channel, nullptr, nullptr, nullptr); g_autofree gchar* start_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording%s", dot_suffix); g_autoptr(FlBasicMessageChannel) start_video_recording_channel = fl_basic_message_channel_new(messenger, start_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(start_video_recording_channel, nullptr, nullptr, nullptr); g_autofree gchar* stop_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.stopVideoRecording%s", dot_suffix); g_autoptr(FlBasicMessageChannel) stop_video_recording_channel = fl_basic_message_channel_new(messenger, stop_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(stop_video_recording_channel, nullptr, nullptr, nullptr); - g_autofree gchar* pause_video_recording_channel_name = 
g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pauseVideoRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) pause_video_recording_channel = fl_basic_message_channel_new(messenger, pause_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(pause_video_recording_channel, nullptr, nullptr, nullptr); - g_autofree gchar* resume_video_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumeVideoRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) resume_video_recording_channel = fl_basic_message_channel_new(messenger, resume_video_recording_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(resume_video_recording_channel, nullptr, nullptr, nullptr); - g_autofree gchar* set_flash_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFlashMode%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_flash_mode_channel = fl_basic_message_channel_new(messenger, set_flash_mode_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_flash_mode_channel, nullptr, nullptr, nullptr); g_autofree gchar* set_exposure_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureMode%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_exposure_mode_channel = fl_basic_message_channel_new(messenger, set_exposure_mode_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_exposure_mode_channel, nullptr, nullptr, nullptr); - g_autofree gchar* set_exposure_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposurePoint%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_exposure_point_channel = fl_basic_message_channel_new(messenger, set_exposure_point_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_exposure_point_channel, nullptr, 
nullptr, nullptr); - g_autofree gchar* set_lens_position_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setLensPosition%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_lens_position_channel = fl_basic_message_channel_new(messenger, set_lens_position_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_lens_position_channel, nullptr, nullptr, nullptr); - g_autofree gchar* get_min_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinExposureOffset%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_min_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_min_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_min_exposure_offset_channel, nullptr, nullptr, nullptr); - g_autofree gchar* get_max_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxExposureOffset%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_max_exposure_offset_channel = fl_basic_message_channel_new(messenger, get_max_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_max_exposure_offset_channel, nullptr, nullptr, nullptr); - g_autofree gchar* set_exposure_offset_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setExposureOffset%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_exposure_offset_channel = fl_basic_message_channel_new(messenger, set_exposure_offset_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_exposure_offset_channel, nullptr, nullptr, nullptr); g_autofree gchar* set_focus_mode_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusMode%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_focus_mode_channel = fl_basic_message_channel_new(messenger, set_focus_mode_channel_name, 
FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_focus_mode_channel, nullptr, nullptr, nullptr); - g_autofree gchar* set_focus_point_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setFocusPoint%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_focus_point_channel = fl_basic_message_channel_new(messenger, set_focus_point_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_focus_point_channel, nullptr, nullptr, nullptr); - g_autofree gchar* get_min_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMinZoomLevel%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_min_zoom_level_channel = fl_basic_message_channel_new(messenger, get_min_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_min_zoom_level_channel, nullptr, nullptr, nullptr); - g_autofree gchar* get_max_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.getMaxZoomLevel%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) get_max_zoom_level_channel = fl_basic_message_channel_new(messenger, get_max_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(get_max_zoom_level_channel, nullptr, nullptr, nullptr); - g_autofree gchar* set_zoom_level_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setZoomLevel%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_zoom_level_channel = fl_basic_message_channel_new(messenger, set_zoom_level_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_zoom_level_channel, nullptr, nullptr, nullptr); - g_autofree gchar* pause_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.pausePreview%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) pause_preview_channel = fl_basic_message_channel_new(messenger, 
pause_preview_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(pause_preview_channel, nullptr, nullptr, nullptr); - g_autofree gchar* resume_preview_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.resumePreview%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) resume_preview_channel = fl_basic_message_channel_new(messenger, resume_preview_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(resume_preview_channel, nullptr, nullptr, nullptr); - g_autofree gchar* update_description_while_recording_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.updateDescriptionWhileRecording%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) update_description_while_recording_channel = fl_basic_message_channel_new(messenger, update_description_while_recording_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(update_description_while_recording_channel, nullptr, nullptr, nullptr); - g_autofree gchar* set_image_file_format_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFileFormat%s", dot_suffix); - g_autoptr(FlBasicMessageChannel) set_image_file_format_channel = fl_basic_message_channel_new(messenger, set_image_file_format_channel_name, FL_MESSAGE_CODEC(codec)); - fl_basic_message_channel_set_message_handler(set_image_file_format_channel, nullptr, nullptr, nullptr); g_autofree gchar* set_image_format_group_channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraApi.setImageFormatGroup%s", dot_suffix); g_autoptr(FlBasicMessageChannel) set_image_format_group_channel = fl_basic_message_channel_new(messenger, set_image_format_group_channel_name, FL_MESSAGE_CODEC(codec)); fl_basic_message_channel_set_message_handler(set_image_format_group_channel, nullptr, nullptr, nullptr); @@ -2419,38 +1167,6 @@ void camera_linux_camera_api_respond_error_initialize(CameraLinuxCameraApiRespon } } 
-void camera_linux_camera_api_respond_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiStartImageStreamResponse) response = camera_linux_camera_api_start_image_stream_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startImageStream", error->message); - } -} - -void camera_linux_camera_api_respond_error_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiStartImageStreamResponse) response = camera_linux_camera_api_start_image_stream_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "startImageStream", error->message); - } -} - -void camera_linux_camera_api_respond_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiStopImageStreamResponse) response = camera_linux_camera_api_stop_image_stream_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopImageStream", error->message); - } -} - -void camera_linux_camera_api_respond_error_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiStopImageStreamResponse) response = camera_linux_camera_api_stop_image_stream_response_new_error(code, message, details); - g_autoptr(GError) error = 
nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "stopImageStream", error->message); - } -} - void camera_linux_camera_api_respond_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, int64_t* return_value) { g_autoptr(CameraLinuxCameraApiGetTextureIdResponse) response = camera_linux_camera_api_get_texture_id_response_new(return_value); g_autoptr(GError) error = nullptr; @@ -2467,22 +1183,6 @@ void camera_linux_camera_api_respond_error_get_texture_id(CameraLinuxCameraApiRe } } -void camera_linux_camera_api_respond_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiReceivedImageStreamDataResponse) response = camera_linux_camera_api_received_image_stream_data_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "receivedImageStreamData", error->message); - } -} - -void camera_linux_camera_api_respond_error_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiReceivedImageStreamDataResponse) response = camera_linux_camera_api_received_image_stream_data_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "receivedImageStreamData", error->message); - } -} - void camera_linux_camera_api_respond_dispose(CameraLinuxCameraApiResponseHandle* response_handle) { g_autoptr(CameraLinuxCameraApiDisposeResponse) response = 
camera_linux_camera_api_dispose_response_new(); g_autoptr(GError) error = nullptr; @@ -2499,40 +1199,8 @@ void camera_linux_camera_api_respond_error_dispose(CameraLinuxCameraApiResponseH } } -void camera_linux_camera_api_respond_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiLockCaptureOrientationResponse) response = camera_linux_camera_api_lock_capture_orientation_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "lockCaptureOrientation", error->message); - } -} - -void camera_linux_camera_api_respond_error_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiLockCaptureOrientationResponse) response = camera_linux_camera_api_lock_capture_orientation_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "lockCaptureOrientation", error->message); - } -} - -void camera_linux_camera_api_respond_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiUnlockCaptureOrientationResponse) response = camera_linux_camera_api_unlock_capture_orientation_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "unlockCaptureOrientation", error->message); - } -} - -void 
camera_linux_camera_api_respond_error_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiUnlockCaptureOrientationResponse) response = camera_linux_camera_api_unlock_capture_orientation_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "unlockCaptureOrientation", error->message); - } -} - -void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value) { - g_autoptr(CameraLinuxCameraApiTakePictureResponse) response = camera_linux_camera_api_take_picture_response_new(return_value); +void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle) { + g_autoptr(CameraLinuxCameraApiTakePictureResponse) response = camera_linux_camera_api_take_picture_response_new(); g_autoptr(GError) error = nullptr; if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { g_warning("Failed to send response to %s.%s: %s", "CameraApi", "takePicture", error->message); @@ -2547,22 +1215,6 @@ void camera_linux_camera_api_respond_error_take_picture(CameraLinuxCameraApiResp } } -void camera_linux_camera_api_respond_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiPrepareForVideoRecordingResponse) response = camera_linux_camera_api_prepare_for_video_recording_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", 
"prepareForVideoRecording", error->message); - } -} - -void camera_linux_camera_api_respond_error_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiPrepareForVideoRecordingResponse) response = camera_linux_camera_api_prepare_for_video_recording_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "prepareForVideoRecording", error->message); - } -} - void camera_linux_camera_api_respond_start_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { g_autoptr(CameraLinuxCameraApiStartVideoRecordingResponse) response = camera_linux_camera_api_start_video_recording_response_new(); g_autoptr(GError) error = nullptr; @@ -2595,54 +1247,6 @@ void camera_linux_camera_api_respond_error_stop_video_recording(CameraLinuxCamer } } -void camera_linux_camera_api_respond_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiPauseVideoRecordingResponse) response = camera_linux_camera_api_pause_video_recording_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pauseVideoRecording", error->message); - } -} - -void camera_linux_camera_api_respond_error_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiPauseVideoRecordingResponse) response = camera_linux_camera_api_pause_video_recording_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pauseVideoRecording", error->message); - } -} - -void camera_linux_camera_api_respond_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiResumeVideoRecordingResponse) response = camera_linux_camera_api_resume_video_recording_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumeVideoRecording", error->message); - } -} - -void camera_linux_camera_api_respond_error_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiResumeVideoRecordingResponse) response = camera_linux_camera_api_resume_video_recording_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumeVideoRecording", error->message); - } -} - -void camera_linux_camera_api_respond_set_flash_mode(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiSetFlashModeResponse) response = camera_linux_camera_api_set_flash_mode_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFlashMode", error->message); - } -} - -void camera_linux_camera_api_respond_error_set_flash_mode(CameraLinuxCameraApiResponseHandle* 
response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiSetFlashModeResponse) response = camera_linux_camera_api_set_flash_mode_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFlashMode", error->message); - } -} - void camera_linux_camera_api_respond_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle) { g_autoptr(CameraLinuxCameraApiSetExposureModeResponse) response = camera_linux_camera_api_set_exposure_mode_response_new(); g_autoptr(GError) error = nullptr; @@ -2659,86 +1263,6 @@ void camera_linux_camera_api_respond_error_set_exposure_mode(CameraLinuxCameraAp } } -void camera_linux_camera_api_respond_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiSetExposurePointResponse) response = camera_linux_camera_api_set_exposure_point_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposurePoint", error->message); - } -} - -void camera_linux_camera_api_respond_error_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiSetExposurePointResponse) response = camera_linux_camera_api_set_exposure_point_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposurePoint", error->message); - } -} - 
-void camera_linux_camera_api_respond_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiSetLensPositionResponse) response = camera_linux_camera_api_set_lens_position_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setLensPosition", error->message); - } -} - -void camera_linux_camera_api_respond_error_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiSetLensPositionResponse) response = camera_linux_camera_api_set_lens_position_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setLensPosition", error->message); - } -} - -void camera_linux_camera_api_respond_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { - g_autoptr(CameraLinuxCameraApiGetMinExposureOffsetResponse) response = camera_linux_camera_api_get_min_exposure_offset_response_new(return_value); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinExposureOffset", error->message); - } -} - -void camera_linux_camera_api_respond_error_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiGetMinExposureOffsetResponse) response = 
camera_linux_camera_api_get_min_exposure_offset_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinExposureOffset", error->message); - } -} - -void camera_linux_camera_api_respond_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { - g_autoptr(CameraLinuxCameraApiGetMaxExposureOffsetResponse) response = camera_linux_camera_api_get_max_exposure_offset_response_new(return_value); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxExposureOffset", error->message); - } -} - -void camera_linux_camera_api_respond_error_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiGetMaxExposureOffsetResponse) response = camera_linux_camera_api_get_max_exposure_offset_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxExposureOffset", error->message); - } -} - -void camera_linux_camera_api_respond_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiSetExposureOffsetResponse) response = camera_linux_camera_api_set_exposure_offset_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to 
send response to %s.%s: %s", "CameraApi", "setExposureOffset", error->message); - } -} - -void camera_linux_camera_api_respond_error_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiSetExposureOffsetResponse) response = camera_linux_camera_api_set_exposure_offset_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setExposureOffset", error->message); - } -} - void camera_linux_camera_api_respond_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle) { g_autoptr(CameraLinuxCameraApiSetFocusModeResponse) response = camera_linux_camera_api_set_focus_mode_response_new(); g_autoptr(GError) error = nullptr; @@ -2755,134 +1279,6 @@ void camera_linux_camera_api_respond_error_set_focus_mode(CameraLinuxCameraApiRe } } -void camera_linux_camera_api_respond_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiSetFocusPointResponse) response = camera_linux_camera_api_set_focus_point_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusPoint", error->message); - } -} - -void camera_linux_camera_api_respond_error_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiSetFocusPointResponse) response = camera_linux_camera_api_set_focus_point_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setFocusPoint", error->message); - } -} - -void camera_linux_camera_api_respond_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { - g_autoptr(CameraLinuxCameraApiGetMinZoomLevelResponse) response = camera_linux_camera_api_get_min_zoom_level_response_new(return_value); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinZoomLevel", error->message); - } -} - -void camera_linux_camera_api_respond_error_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiGetMinZoomLevelResponse) response = camera_linux_camera_api_get_min_zoom_level_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMinZoomLevel", error->message); - } -} - -void camera_linux_camera_api_respond_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value) { - g_autoptr(CameraLinuxCameraApiGetMaxZoomLevelResponse) response = camera_linux_camera_api_get_max_zoom_level_response_new(return_value); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxZoomLevel", error->message); - } -} - -void 
camera_linux_camera_api_respond_error_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiGetMaxZoomLevelResponse) response = camera_linux_camera_api_get_max_zoom_level_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "getMaxZoomLevel", error->message); - } -} - -void camera_linux_camera_api_respond_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiSetZoomLevelResponse) response = camera_linux_camera_api_set_zoom_level_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setZoomLevel", error->message); - } -} - -void camera_linux_camera_api_respond_error_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiSetZoomLevelResponse) response = camera_linux_camera_api_set_zoom_level_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setZoomLevel", error->message); - } -} - -void camera_linux_camera_api_respond_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiPausePreviewResponse) response = camera_linux_camera_api_pause_preview_response_new(); - g_autoptr(GError) error = nullptr; - if 
(!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pausePreview", error->message); - } -} - -void camera_linux_camera_api_respond_error_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiPausePreviewResponse) response = camera_linux_camera_api_pause_preview_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "pausePreview", error->message); - } -} - -void camera_linux_camera_api_respond_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiResumePreviewResponse) response = camera_linux_camera_api_resume_preview_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumePreview", error->message); - } -} - -void camera_linux_camera_api_respond_error_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiResumePreviewResponse) response = camera_linux_camera_api_resume_preview_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "resumePreview", error->message); - } -} - -void 
camera_linux_camera_api_respond_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse) response = camera_linux_camera_api_update_description_while_recording_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "updateDescriptionWhileRecording", error->message); - } -} - -void camera_linux_camera_api_respond_error_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - g_autoptr(CameraLinuxCameraApiUpdateDescriptionWhileRecordingResponse) response = camera_linux_camera_api_update_description_while_recording_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "updateDescriptionWhileRecording", error->message); - } -} - -void camera_linux_camera_api_respond_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle) { - g_autoptr(CameraLinuxCameraApiSetImageFileFormatResponse) response = camera_linux_camera_api_set_image_file_format_response_new(); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFileFormat", error->message); - } -} - -void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details) { - 
g_autoptr(CameraLinuxCameraApiSetImageFileFormatResponse) response = camera_linux_camera_api_set_image_file_format_response_new_error(code, message, details); - g_autoptr(GError) error = nullptr; - if (!fl_basic_message_channel_respond(response_handle->channel, response_handle->response_handle, response->value, &error)) { - g_warning("Failed to send response to %s.%s: %s", "CameraApi", "setImageFileFormat", error->message); - } -} - void camera_linux_camera_api_respond_set_image_format_group(CameraLinuxCameraApiResponseHandle* response_handle) { g_autoptr(CameraLinuxCameraApiSetImageFormatGroupResponse) response = camera_linux_camera_api_set_image_format_group_response_new(); g_autoptr(GError) error = nullptr; @@ -2988,7 +1384,7 @@ static void camera_linux_camera_event_api_initialized_cb(GObject* object, GAsync void camera_linux_camera_event_api_initialized(CameraLinuxCameraEventApi* self, CameraLinuxPlatformCameraState* initial_state, GCancellable* cancellable, GAsyncReadyCallback callback, gpointer user_data) { g_autoptr(FlValue) args = fl_value_new_list(); - fl_value_append_take(args, fl_value_new_custom_object(137, G_OBJECT(initial_state))); + fl_value_append_take(args, fl_value_new_custom_object(136, G_OBJECT(initial_state))); g_autofree gchar* channel_name = g_strdup_printf("dev.flutter.pigeon.camera_linux.CameraEventApi.initialized%s", self->suffix); g_autoptr(CameraLinuxMessageCodec) codec = camera_linux_message_codec_new(); FlBasicMessageChannel* channel = fl_basic_message_channel_new(self->messenger, channel_name, FL_MESSAGE_CODEC(codec)); diff --git a/packages/camera/camera_linux/linux/messages.g.h b/packages/camera/camera_linux/linux/messages.g.h index 76798dd42712..653b28d53a0c 100644 --- a/packages/camera/camera_linux/linux/messages.g.h +++ b/packages/camera/camera_linux/linux/messages.g.h @@ -63,18 +63,6 @@ typedef enum { CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED = 1 } CameraLinuxPlatformFocusMode; -/** - * CameraLinuxPlatformImageFileFormat: - * 
CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_JPEG: - * CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_HEIF: - * - * Pigeon version of ImageFileFormat. - */ -typedef enum { - CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_JPEG = 0, - CAMERA_LINUX_PLATFORM_IMAGE_FILE_FORMAT_HEIF = 1 -} CameraLinuxPlatformImageFileFormat; - /** * CameraLinuxPlatformImageFormatGroup: * CAMERA_LINUX_PLATFORM_IMAGE_FORMAT_GROUP_RGB8: @@ -267,35 +255,13 @@ typedef struct { void (*get_available_cameras_names)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*create)(const gchar* camera_name, CameraLinuxPlatformResolutionPreset resolution_preset, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*initialize)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*start_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*stop_image_stream)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*get_texture_id)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*received_image_stream_data)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*dispose)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*lock_capture_orientation)(CameraLinuxPlatformDeviceOrientation orientation, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*unlock_capture_orientation)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*take_picture)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*prepare_for_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*start_video_recording)(gboolean enable_stream, CameraLinuxCameraApiResponseHandle* 
response_handle, gpointer user_data); - void (*stop_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*pause_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*resume_video_recording)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_flash_mode)(CameraLinuxPlatformFlashMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_exposure_mode)(CameraLinuxPlatformExposureMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_exposure_point)(CameraLinuxPlatformPoint* point, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_lens_position)(double position, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*get_min_exposure_offset)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*get_max_exposure_offset)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_exposure_offset)(double offset, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_focus_mode)(CameraLinuxPlatformFocusMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_focus_point)(CameraLinuxPlatformPoint* point, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*get_min_zoom_level)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*get_max_zoom_level)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_zoom_level)(double zoom, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*pause_preview)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*resume_preview)(CameraLinuxCameraApiResponseHandle* response_handle, gpointer 
user_data); - void (*update_description_while_recording)(const gchar* camera_name, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*set_image_file_format)(CameraLinuxPlatformImageFileFormat format, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*take_picture)(int64_t camera_id, const gchar* path, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*start_video_recording)(int64_t camera_id, gboolean enable_stream, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*stop_video_recording)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_exposure_mode)(int64_t camera_id, CameraLinuxPlatformExposureMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*set_focus_mode)(int64_t camera_id, CameraLinuxPlatformFocusMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*set_image_format_group)(int64_t camera_id, CameraLinuxPlatformImageFormatGroup image_format_group, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); } CameraLinuxCameraApiVTable; @@ -381,44 +347,6 @@ void camera_linux_camera_api_respond_initialize(CameraLinuxCameraApiResponseHand */ void camera_linux_camera_api_respond_error_initialize(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -/** - * camera_linux_camera_api_respond_start_image_stream: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.startImageStream. - */ -void camera_linux_camera_api_respond_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_start_image_stream: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. 
- * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.startImageStream. - */ -void camera_linux_camera_api_respond_error_start_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_stop_image_stream: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.stopImageStream. - */ -void camera_linux_camera_api_respond_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_stop_image_stream: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.stopImageStream. - */ -void camera_linux_camera_api_respond_error_stop_image_stream(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - /** * camera_linux_camera_api_respond_get_texture_id: * @response_handle: a #CameraLinuxCameraApiResponseHandle. @@ -439,25 +367,6 @@ void camera_linux_camera_api_respond_get_texture_id(CameraLinuxCameraApiResponse */ void camera_linux_camera_api_respond_error_get_texture_id(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -/** - * camera_linux_camera_api_respond_received_image_stream_data: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.receivedImageStreamData. - */ -void camera_linux_camera_api_respond_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_received_image_stream_data: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. 
- * - * Responds with an error to CameraApi.receivedImageStreamData. - */ -void camera_linux_camera_api_respond_error_received_image_stream_data(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - /** * camera_linux_camera_api_respond_dispose: * @response_handle: a #CameraLinuxCameraApiResponseHandle. @@ -477,52 +386,13 @@ void camera_linux_camera_api_respond_dispose(CameraLinuxCameraApiResponseHandle* */ void camera_linux_camera_api_respond_error_dispose(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -/** - * camera_linux_camera_api_respond_lock_capture_orientation: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.lockCaptureOrientation. - */ -void camera_linux_camera_api_respond_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_lock_capture_orientation: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.lockCaptureOrientation. - */ -void camera_linux_camera_api_respond_error_lock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_unlock_capture_orientation: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.unlockCaptureOrientation. - */ -void camera_linux_camera_api_respond_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_unlock_capture_orientation: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. 
- * - * Responds with an error to CameraApi.unlockCaptureOrientation. - */ -void camera_linux_camera_api_respond_error_unlock_capture_orientation(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - /** * camera_linux_camera_api_respond_take_picture: * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @return_value: location to write the value returned by this method. * * Responds to CameraApi.takePicture. */ -void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* return_value); +void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHandle* response_handle); /** * camera_linux_camera_api_respond_error_take_picture: @@ -535,25 +405,6 @@ void camera_linux_camera_api_respond_take_picture(CameraLinuxCameraApiResponseHa */ void camera_linux_camera_api_respond_error_take_picture(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -/** - * camera_linux_camera_api_respond_prepare_for_video_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.prepareForVideoRecording. - */ -void camera_linux_camera_api_respond_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_prepare_for_video_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.prepareForVideoRecording. - */ -void camera_linux_camera_api_respond_error_prepare_for_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - /** * camera_linux_camera_api_respond_start_video_recording: * @response_handle: a #CameraLinuxCameraApiResponseHandle. 
@@ -593,63 +444,6 @@ void camera_linux_camera_api_respond_stop_video_recording(CameraLinuxCameraApiRe */ void camera_linux_camera_api_respond_error_stop_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -/** - * camera_linux_camera_api_respond_pause_video_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.pauseVideoRecording. - */ -void camera_linux_camera_api_respond_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_pause_video_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.pauseVideoRecording. - */ -void camera_linux_camera_api_respond_error_pause_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_resume_video_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.resumeVideoRecording. - */ -void camera_linux_camera_api_respond_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_resume_video_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.resumeVideoRecording. - */ -void camera_linux_camera_api_respond_error_resume_video_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_set_flash_mode: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. 
- * - * Responds to CameraApi.setFlashMode. - */ -void camera_linux_camera_api_respond_set_flash_mode(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_set_flash_mode: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.setFlashMode. - */ -void camera_linux_camera_api_respond_error_set_flash_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - /** * camera_linux_camera_api_respond_set_exposure_mode: * @response_handle: a #CameraLinuxCameraApiResponseHandle. @@ -669,103 +463,6 @@ void camera_linux_camera_api_respond_set_exposure_mode(CameraLinuxCameraApiRespo */ void camera_linux_camera_api_respond_error_set_exposure_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -/** - * camera_linux_camera_api_respond_set_exposure_point: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.setExposurePoint. - */ -void camera_linux_camera_api_respond_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_set_exposure_point: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.setExposurePoint. - */ -void camera_linux_camera_api_respond_error_set_exposure_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_set_lens_position: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.setLensPosition. 
- */ -void camera_linux_camera_api_respond_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_set_lens_position: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.setLensPosition. - */ -void camera_linux_camera_api_respond_error_set_lens_position(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_get_min_exposure_offset: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @return_value: location to write the value returned by this method. - * - * Responds to CameraApi.getMinExposureOffset. - */ -void camera_linux_camera_api_respond_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); - -/** - * camera_linux_camera_api_respond_error_get_min_exposure_offset: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.getMinExposureOffset. - */ -void camera_linux_camera_api_respond_error_get_min_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_get_max_exposure_offset: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @return_value: location to write the value returned by this method. - * - * Responds to CameraApi.getMaxExposureOffset. 
- */ -void camera_linux_camera_api_respond_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); - -/** - * camera_linux_camera_api_respond_error_get_max_exposure_offset: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.getMaxExposureOffset. - */ -void camera_linux_camera_api_respond_error_get_max_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_set_exposure_offset: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.setExposureOffset. - */ -void camera_linux_camera_api_respond_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_set_exposure_offset: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.setExposureOffset. - */ -void camera_linux_camera_api_respond_error_set_exposure_offset(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - /** * camera_linux_camera_api_respond_set_focus_mode: * @response_handle: a #CameraLinuxCameraApiResponseHandle. @@ -785,160 +482,6 @@ void camera_linux_camera_api_respond_set_focus_mode(CameraLinuxCameraApiResponse */ void camera_linux_camera_api_respond_error_set_focus_mode(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); -/** - * camera_linux_camera_api_respond_set_focus_point: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.setFocusPoint. 
- */ -void camera_linux_camera_api_respond_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_set_focus_point: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.setFocusPoint. - */ -void camera_linux_camera_api_respond_error_set_focus_point(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_get_min_zoom_level: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @return_value: location to write the value returned by this method. - * - * Responds to CameraApi.getMinZoomLevel. - */ -void camera_linux_camera_api_respond_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); - -/** - * camera_linux_camera_api_respond_error_get_min_zoom_level: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.getMinZoomLevel. - */ -void camera_linux_camera_api_respond_error_get_min_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_get_max_zoom_level: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @return_value: location to write the value returned by this method. - * - * Responds to CameraApi.getMaxZoomLevel. - */ -void camera_linux_camera_api_respond_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, double return_value); - -/** - * camera_linux_camera_api_respond_error_get_max_zoom_level: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. 
- * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.getMaxZoomLevel. - */ -void camera_linux_camera_api_respond_error_get_max_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_set_zoom_level: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.setZoomLevel. - */ -void camera_linux_camera_api_respond_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_set_zoom_level: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.setZoomLevel. - */ -void camera_linux_camera_api_respond_error_set_zoom_level(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_pause_preview: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.pausePreview. - */ -void camera_linux_camera_api_respond_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_pause_preview: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.pausePreview. - */ -void camera_linux_camera_api_respond_error_pause_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_resume_preview: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.resumePreview. 
- */ -void camera_linux_camera_api_respond_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_resume_preview: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.resumePreview. - */ -void camera_linux_camera_api_respond_error_resume_preview(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_update_description_while_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.updateDescriptionWhileRecording. - */ -void camera_linux_camera_api_respond_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_update_description_while_recording: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. - * - * Responds with an error to CameraApi.updateDescriptionWhileRecording. - */ -void camera_linux_camera_api_respond_error_update_description_while_recording(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - -/** - * camera_linux_camera_api_respond_set_image_file_format: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * - * Responds to CameraApi.setImageFileFormat. - */ -void camera_linux_camera_api_respond_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle); - -/** - * camera_linux_camera_api_respond_error_set_image_file_format: - * @response_handle: a #CameraLinuxCameraApiResponseHandle. - * @code: error code. - * @message: error message. - * @details: (allow-none): error details or %NULL. 
- * - * Responds with an error to CameraApi.setImageFileFormat. - */ -void camera_linux_camera_api_respond_error_set_image_file_format(CameraLinuxCameraApiResponseHandle* response_handle, const gchar* code, const gchar* message, FlValue* details); - /** * camera_linux_camera_api_respond_set_image_format_group: * @response_handle: a #CameraLinuxCameraApiResponseHandle. diff --git a/packages/camera/camera_linux/pigeons/messages.dart b/packages/camera/camera_linux/pigeons/messages.dart index f9a02a24e2e6..cba2d909c068 100644 --- a/packages/camera/camera_linux/pigeons/messages.dart +++ b/packages/camera/camera_linux/pigeons/messages.dart @@ -48,12 +48,6 @@ enum PlatformFocusMode { locked, } -/// Pigeon version of ImageFileFormat. -enum PlatformImageFileFormat { - jpeg, - heif, -} - // Pigeon version of the subset of ImageFormatGroup supported on iOS. enum PlatformImageFormatGroup { rgb8, @@ -117,137 +111,36 @@ abstract class CameraApi { @async void initialize(int cameraId, PlatformImageFormatGroup imageFormat); - /// Begins streaming frames from the camera. - @async - void startImageStream(); - - /// Stops streaming frames from the camera. - @async - void stopImageStream(); - /// Get the texture ID for the camera with the given ID. @async int? getTextureId(int cameraId); - /// Called by the Dart side of the plugin when it has received the last image - /// frame sent. - /// - /// This is used to throttle sending frames across the channel. - @async - void receivedImageStreamData(); - /// Indicates that the given camera is no longer being used on the Dart side, /// and any associated resources can be cleaned up. @async void dispose(int cameraId); - /// Locks the camera capture to the current device orientation. - @async - void lockCaptureOrientation(PlatformDeviceOrientation orientation); - - /// Unlocks camera capture orientation, allowing it to automatically adapt to - /// device orientation. 
- @async - void unlockCaptureOrientation(); - /// Takes a picture with the current settings, and returns the path to the /// resulting file. @async - String takePicture(); - - /// Does any preprocessing necessary before beginning to record video. - @async - void prepareForVideoRecording(); + void takePicture(int cameraId, String path); /// Begins recording video, optionally enabling streaming to Dart at the same /// time. @async - void startVideoRecording(bool enableStream); + void startVideoRecording(int cameraId, bool enableStream); /// Stops recording video, and results the path to the resulting file. @async - String stopVideoRecording(); - - /// Pauses video recording. - @async - void pauseVideoRecording(); - - /// Resumes a previously paused video recording. - @async - void resumeVideoRecording(); - - /// Switches the camera to the given flash mode. - @async - void setFlashMode(PlatformFlashMode mode); + String stopVideoRecording(int cameraId); /// Switches the camera to the given exposure mode. @async - void setExposureMode(PlatformExposureMode mode); - - /// Anchors auto-exposure to the given point in (0,1) coordinate space. - /// - /// A null value resets to the default exposure point. - @async - void setExposurePoint(PlatformPoint? point); - - /// Sets the lens position manually to the given value. - /// The value should be between 0 and 1. - /// 0 means the lens is at the minimum position. - /// 1 means the lens is at the maximum position. - @async - void setLensPosition(double position); - - /// Returns the minimum exposure offset supported by the camera. - @async - double getMinExposureOffset(); - - /// Returns the maximum exposure offset supported by the camera. - @async - double getMaxExposureOffset(); - - /// Sets the exposure offset manually to the given value. - @async - void setExposureOffset(double offset); + void setExposureMode(int cameraId, PlatformExposureMode mode); /// Switches the camera to the given focus mode. 
@async - void setFocusMode(PlatformFocusMode mode); - - /// Anchors auto-focus to the given point in (0,1) coordinate space. - /// - /// A null value resets to the default focus point. - @async - void setFocusPoint(PlatformPoint? point); - - /// Returns the minimum zoom level supported by the camera. - @async - double getMinZoomLevel(); - - /// Returns the maximum zoom level supported by the camera. - @async - double getMaxZoomLevel(); - - /// Sets the zoom factor. - @async - void setZoomLevel(double zoom); - - /// Pauses streaming of preview frames. - @async - void pausePreview(); - - /// Resumes a previously paused preview stream. - @async - void resumePreview(); - - /// Changes the camera used while recording video. - /// - /// This should only be called while video recording is active. - @async - void updateDescriptionWhileRecording(String cameraName); - - /// Sets the file format used for taking pictures. - @async - void setImageFileFormat(PlatformImageFileFormat format); + void setFocusMode(int cameraId, PlatformFocusMode mode); //Sets the ImageFormatGroup. 
@async diff --git a/packages/camera/camera_linux/pubspec.yaml b/packages/camera/camera_linux/pubspec.yaml index 043bc9e2d79e..d3fdd73d210c 100644 --- a/packages/camera/camera_linux/pubspec.yaml +++ b/packages/camera/camera_linux/pubspec.yaml @@ -12,6 +12,7 @@ dependencies: plugin_platform_interface: ^2.0.2 camera_platform_interface: ^2.7.0 stream_transform: ^2.1.1 + path_provider: ^2.1.5 dev_dependencies: flutter_test: From 766813d2253c0f54c2f4cdd97ad84821d1689aca Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Tue, 10 Jun 2025 09:30:46 -0400 Subject: [PATCH 13/21] video recording --- packages/camera/camera/pubspec.yaml | 2 +- .../camera_linux/lib/src/linux_camera.dart | 22 ++++++++-- .../camera_linux/lib/src/messages.g.dart | 4 +- .../camera/camera_linux/linux/CMakeLists.txt | 27 +++++++++++-- packages/camera/camera_linux/linux/camera.cpp | 33 ++++++++++++++- packages/camera/camera_linux/linux/camera.h | 9 +++-- .../camera_linux/linux/camera_host_plugin.cpp | 26 ++++++++++++ .../camera_linux/linux/camera_host_plugin.h | 10 ++--- ...era_video_recorder_image_event_handler.cpp | 40 +++++++++++++++++++ ...amera_video_recorder_image_event_handler.h | 15 ++++--- .../camera/camera_linux/linux/messages.g.cc | 4 +- .../camera/camera_linux/linux/messages.g.h | 2 +- .../camera/camera_linux/pigeons/messages.dart | 2 +- 13 files changed, 162 insertions(+), 34 deletions(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 7bedf1cb8e5d..20d57820ff83 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: git: url: git@github.com:LightX-Innovations/flutter_packages.git path: packages/camera/camera_linux - ref: camera_0.3 + ref: camera_0.4 camera_web: ^0.3.3 flutter: sdk: flutter diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart index 27a649546595..2b6161a6e4dd 100644 --- 
a/packages/camera/camera_linux/lib/src/linux_camera.dart +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -208,13 +208,27 @@ class CameraLinux extends CameraPlatform { @Deprecated( 'This parameter is unused, and will be ignored on all platforms') Duration? maxVideoDuration, - }) { - throw UnimplementedError('startVideoRecording() is not implemented.'); + }) async { + try { + final directory = await getTemporaryDirectory(); + final uuid = DateTime.now().millisecondsSinceEpoch.toString(); + final path = '${directory.path}/$uuid.mp4'; + await _hostApi.startVideoRecording(cameraId, path); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + // No-op for Linux, as video recording is not supported. + return Future.value(); } @override - Future stopVideoRecording(int cameraId) { - throw UnimplementedError('stopVideoRecording() is not implemented.'); + Future stopVideoRecording(int cameraId) async { + try { + final path = await _hostApi.stopVideoRecording(cameraId); + return XFile(path); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } } @override diff --git a/packages/camera/camera_linux/lib/src/messages.g.dart b/packages/camera/camera_linux/lib/src/messages.g.dart index ebfd0a9bc5f9..a1d2cac8878b 100644 --- a/packages/camera/camera_linux/lib/src/messages.g.dart +++ b/packages/camera/camera_linux/lib/src/messages.g.dart @@ -402,7 +402,7 @@ class CameraApi { /// Begins recording video, optionally enabling streaming to Dart at the same /// time. - Future startVideoRecording(int cameraId, bool enableStream) async { + Future startVideoRecording(int cameraId, String path) async { final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_linux.CameraApi.startVideoRecording$pigeonVar_messageChannelSuffix'; final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, @@ -410,7 +410,7 @@ class CameraApi { binaryMessenger: pigeonVar_binaryMessenger, ); final List? 
pigeonVar_replyList = - await pigeonVar_channel.send([cameraId, enableStream]) as List?; + await pigeonVar_channel.send([cameraId, path]) as List?; if (pigeonVar_replyList == null) { throw _createConnectionError(pigeonVar_channelName); } else if (pigeonVar_replyList.length > 1) { diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index dfa6c7bf8998..80811bca1f0a 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -8,11 +8,15 @@ set(CMAKE_CXX_STANDARD 17) # Set variables set(PYLON_VERSION "8.0.2.16314") set(PYLON_ARCHIVE_NAME "pylon-${PYLON_VERSION}_linux-aarch64.tar.gz") -set(PYLON_ARCHIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/../deps/${PYLON_ARCHIVE_NAME}) +set(PYLON_ARCHIVE_PATH ${CMAKE_BINARY_DIR}/downloads/${PYLON_ARCHIVE_NAME}) set(PYLON_DOWNLOAD_URL "https://github.com/LightX-Innovations/flutter_packages/releases/download/camera_linux_v0.1/pylon-${PYLON_VERSION}_linux-aarch64.tar.gz") set(PYLON_ROOT ${CMAKE_BINARY_DIR}/pylon-sdk) -# Download the archive if it does not exist +set(PYLON_MPEG_ARCHIVE_NAME "pylon-supplementary-package-for-mpeg-4-1.0.2.121_aarch64.tar.gz") +set(PYLON_MPEG_ARCHIVE_PATH ${CMAKE_BINARY_DIR}/downloads/${PYLON_MPEG_ARCHIVE_NAME}) +set(PYLON_MPEG_DOWNLOAD_URL "https://github.com/LightX-Innovations/flutter_packages/releases/download/camera_linux_v0.1/pylon-supplementary-package-for-mpeg-4-1.0.2.121_aarch64.tar.gz") + +# Download the archives if they do not exist if(NOT EXISTS "${PYLON_ARCHIVE_PATH}") message(STATUS "Downloading Pylon SDK archive...") file(DOWNLOAD @@ -24,7 +28,18 @@ if(NOT EXISTS "${PYLON_ARCHIVE_PATH}") list(GET DOWNLOAD_STATUS 0 DOWNLOAD_RESULT) endif() -# Extract the archive only if not already extracted +if(NOT EXISTS "${PYLON_MPEG_ARCHIVE_PATH}") + message(STATUS "Downloading Pylon SDK MPEG archive...") + file(DOWNLOAD + "${PYLON_MPEG_DOWNLOAD_URL}" + "${PYLON_MPEG_ARCHIVE_PATH}" + 
SHOW_PROGRESS + STATUS DOWNLOAD_STATUS + ) + list(GET DOWNLOAD_STATUS 0 DOWNLOAD_RESULT) +endif() + + if(NOT EXISTS "${PYLON_ROOT}") message(STATUS "Extracting Pylon SDK to ${PYLON_ROOT}...") file(MAKE_DIRECTORY "${PYLON_ROOT}") @@ -32,9 +47,15 @@ if(NOT EXISTS "${PYLON_ROOT}") COMMAND ${CMAKE_COMMAND} -E tar -xzf "${PYLON_ARCHIVE_PATH}" WORKING_DIRECTORY "${PYLON_ROOT}" ) + message(STATUS "Extracting Pylon MPEG SDK to ${PYLON_ROOT}...") + execute_process( + COMMAND ${CMAKE_COMMAND} -E tar -xzf "${PYLON_MPEG_ARCHIVE_PATH}" + WORKING_DIRECTORY "${PYLON_ROOT}" + ) endif() include_directories(${PYLON_ROOT}/include) + link_directories(${PYLON_ROOT}/lib) # Project-level configuration. diff --git a/packages/camera/camera_linux/linux/camera.cpp b/packages/camera/camera_linux/linux/camera.cpp index be21e2d74f35..ef160c2044fb 100644 --- a/packages/camera/camera_linux/linux/camera.cpp +++ b/packages/camera/camera_linux/linux/camera.cpp @@ -96,7 +96,7 @@ int64_t Camera::getTextureId() { return cameraTextureImageEventHandler->get_texture_id(); } -void Camera::takePicture(std::string file_path) { +void Camera::takePicture(std::string filePath) { CAMERA_CONFIG_LOCK( Pylon::CGrabResultPtr grabResult; @@ -121,7 +121,7 @@ void Camera::takePicture(std::string file_path) { isMono ? CV_8UC1 : CV_8UC3, (uint8_t*)image.GetBuffer()); cv::Mat bgr; cv::cvtColor(mat, bgr, isMono ? cv::COLOR_GRAY2BGR : cv::COLOR_RGB2BGR); - cv::imwrite(file_path, bgr); + cv::imwrite(filePath, bgr); ); } @@ -230,3 +230,32 @@ void Camera::setFocusMode(CameraLinuxPlatformFocusMode mode) { emitState(); }); } + +void Camera::startVideoRecording(std::string filePath) { + if (!camera || !Pylon::CVideoWriter::IsSupported() || + cameraVideoRecorderImageEventHandler) { + std::cerr << "Video recording is not supported or camera is not " + "initialized. or already recording." 
+ << std::endl; + return; + } + CAMERA_CONFIG_LOCK({ + cameraVideoRecorderImageEventHandler = + std::make_unique(filePath); + camera->RegisterImageEventHandler( + cameraVideoRecorderImageEventHandler.get(), + Pylon::RegistrationMode_Append, Pylon::Cleanup_None); + }); +} + +void Camera::stopVideoRecording(std::string& filePath) { + if (!camera || !cameraVideoRecorderImageEventHandler) { + return; + } + CAMERA_CONFIG_LOCK({ + filePath = cameraVideoRecorderImageEventHandler->m_videoFilePath; + camera->DeregisterImageEventHandler( + cameraVideoRecorderImageEventHandler.get()); + cameraVideoRecorderImageEventHandler.reset(); + }); +} \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/camera.h b/packages/camera/camera_linux/linux/camera.h index 05ed36d49e07..d2f9f9ffaadb 100644 --- a/packages/camera/camera_linux/linux/camera.h +++ b/packages/camera/camera_linux/linux/camera.h @@ -4,6 +4,7 @@ #include +#include "camera_video_recorder_image_event_handler.h" #include "flutter_linux/flutter_linux.h" #include "messages.g.h" @@ -23,6 +24,8 @@ class Camera { std::unique_ptr cameraTextureImageEventHandler; CameraLinuxCameraEventApi* cameraLinuxCameraEventApi; + std::unique_ptr + cameraVideoRecorderImageEventHandler; Camera(Pylon::IPylonDevice* device, int64_t camera_id, FlPluginRegistrar* registrar, @@ -37,9 +40,9 @@ class Camera { int64_t getTextureId(); - void takePicture(std::string file_path); - // void startVideoRecording(); - // void stopVideoRecording(); + void takePicture(std::string filePath); + void startVideoRecording(std::string filePath); + void stopVideoRecording(std::string& filePath); void setImageFormatGroup( CameraLinuxPlatformImageFormatGroup imageFormatGroup); diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.cpp b/packages/camera/camera_linux/linux/camera_host_plugin.cpp index 2bc90ab72dcb..0fcb98dfc145 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.cpp +++ 
b/packages/camera/camera_linux/linux/camera_host_plugin.cpp @@ -172,4 +172,30 @@ void CameraHostPlugin::take_picture( camera.takePicture(std::string(path)); CAMERA_HOST_VOID_RETURN(); }); +} + +void CameraHostPlugin::start_video_recording( + int64_t camera_id, const gchar* path, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(start_video_recording, { + Camera& camera = get_camera_by_id(camera_id); + + camera.startVideoRecording(std::string(path)); + CAMERA_HOST_VOID_RETURN(); + }); +} + +void CameraHostPlugin::stop_video_recording( + int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, + gpointer user_data) { + CAMERA_HOST_ERROR_HANDLING(stop_video_recording, { + Camera& camera = get_camera_by_id(camera_id); + + std::string path; + camera.stopVideoRecording(path); + if (path.empty()) { + CAMERA_HOST_RAISE_ERROR("Video recording not started"); + } + CAMERA_HOST_RETURN(path.c_str()); + }); } \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.h b/packages/camera/camera_linux/linux/camera_host_plugin.h index 92f5971f52c5..e059dc12284a 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.h +++ b/packages/camera/camera_linux/linux/camera_host_plugin.h @@ -86,16 +86,12 @@ class CameraHostPlugin { gpointer user_data); static void start_video_recording( - int64_t camera_id, gboolean enable_stream, - CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } + int64_t camera_id, const gchar* path, + CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); static void stop_video_recording( int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, - gpointer user_data) { - throw new std::runtime_error("Not Implemented"); - } + gpointer user_data); static void set_exposure_mode( int64_t camera_id, CameraLinuxPlatformExposureMode mode, diff --git 
a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp index e69de29bb2d1..5fbf1316568e 100644 --- a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp +++ b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.cpp @@ -0,0 +1,40 @@ +#include "camera_video_recorder_image_event_handler.h" + +CameraVideoRecorderImageEventHandler::CameraVideoRecorderImageEventHandler( + std::string videoFilePath) + : m_videoFilePath(std::move(videoFilePath)) {} + +void CameraVideoRecorderImageEventHandler::OnImageGrabbed( + Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& ptr) { + if (!ptr->GrabSucceeded()) { + std::cerr << "Error: Grab failed or texture not ready." << std::endl; + return; + } + + static bool isFirstFrame = true; + if (isFirstFrame) { + m_videoWriter.SetParameter(ptr->GetWidth(), ptr->GetHeight(), + ptr->GetPixelType(), + CAMERA_VIDEO_RECORDER_PLAY_BACK_FRAME_RATE, + CAMERA_VIDEO_RECORDER_QUALITY); + m_videoWriter.Open(m_videoFilePath.c_str()); + isFirstFrame = false; + } + + Pylon::CPylonImage image; + image.AttachGrabResultBuffer(ptr); + m_videoWriter.Add(image); +} + +void CameraVideoRecorderImageEventHandler::OnImageEventHandlerDeregistered( + Pylon::CInstantCamera& camera) { + if (m_videoWriter.IsOpen()) { + m_videoWriter.Close(); + } +} + +CameraVideoRecorderImageEventHandler::~CameraVideoRecorderImageEventHandler() { + if (m_videoWriter.IsOpen()) { + m_videoWriter.Close(); + } +} diff --git a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h index 6d3ab39f0a97..de95fb51d73f 100644 --- a/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h +++ b/packages/camera/camera_linux/linux/camera_video_recorder_image_event_handler.h @@ -2,11 +2,7 @@ #ifndef 
CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_ #define CAMERA_VIDEO_RECORDER_IMAGE_EVENT_HANDLER_H_ -#include - -#include "camera.h" #include "flutter_linux/flutter_linux.h" -#include "messages.g.h" #pragma clang diagnostic push #pragma clang diagnostic ignored "-Woverloaded-virtual" @@ -16,15 +12,18 @@ #pragma clang diagnostic pop +#define CAMERA_VIDEO_RECORDER_PLAY_BACK_FRAME_RATE 60.0 +#define CAMERA_VIDEO_RECORDER_QUALITY 100 + class CameraVideoRecorderImageEventHandler : public Pylon::CImageEventHandler { - const Camera& camera; + Pylon::CVideoWriter m_videoWriter; public: - CameraVideoRecorderImageEventHandler(const Camera& camera); + std::string m_videoFilePath; - ~CameraVideoRecorderImageEventHandler() override; + CameraVideoRecorderImageEventHandler(std::string videoFilePath); - void OnImageEventHandlerRegistered(Pylon::CInstantCamera& camera) override; + ~CameraVideoRecorderImageEventHandler() override; void OnImageGrabbed(Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& ptr) override; diff --git a/packages/camera/camera_linux/linux/messages.g.cc b/packages/camera/camera_linux/linux/messages.g.cc index e717f09980e3..ec5cfd5389ca 100644 --- a/packages/camera/camera_linux/linux/messages.g.cc +++ b/packages/camera/camera_linux/linux/messages.g.cc @@ -977,9 +977,9 @@ static void camera_linux_camera_api_start_video_recording_cb(FlBasicMessageChann FlValue* value0 = fl_value_get_list_value(message_, 0); int64_t camera_id = fl_value_get_int(value0); FlValue* value1 = fl_value_get_list_value(message_, 1); - gboolean enable_stream = fl_value_get_bool(value1); + const gchar* path = fl_value_get_string(value1); g_autoptr(CameraLinuxCameraApiResponseHandle) handle = camera_linux_camera_api_response_handle_new(channel, response_handle); - self->vtable->start_video_recording(camera_id, enable_stream, handle, self->user_data); + self->vtable->start_video_recording(camera_id, path, handle, self->user_data); } static void 
camera_linux_camera_api_stop_video_recording_cb(FlBasicMessageChannel* channel, FlValue* message_, FlBasicMessageChannelResponseHandle* response_handle, gpointer user_data) { diff --git a/packages/camera/camera_linux/linux/messages.g.h b/packages/camera/camera_linux/linux/messages.g.h index 653b28d53a0c..e84a5660ef81 100644 --- a/packages/camera/camera_linux/linux/messages.g.h +++ b/packages/camera/camera_linux/linux/messages.g.h @@ -258,7 +258,7 @@ typedef struct { void (*get_texture_id)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*dispose)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*take_picture)(int64_t camera_id, const gchar* path, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); - void (*start_video_recording)(int64_t camera_id, gboolean enable_stream, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); + void (*start_video_recording)(int64_t camera_id, const gchar* path, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*stop_video_recording)(int64_t camera_id, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*set_exposure_mode)(int64_t camera_id, CameraLinuxPlatformExposureMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); void (*set_focus_mode)(int64_t camera_id, CameraLinuxPlatformFocusMode mode, CameraLinuxCameraApiResponseHandle* response_handle, gpointer user_data); diff --git a/packages/camera/camera_linux/pigeons/messages.dart b/packages/camera/camera_linux/pigeons/messages.dart index cba2d909c068..6f1ee783d31b 100644 --- a/packages/camera/camera_linux/pigeons/messages.dart +++ b/packages/camera/camera_linux/pigeons/messages.dart @@ -128,7 +128,7 @@ abstract class CameraApi { /// Begins recording video, optionally enabling streaming to Dart at the same /// time. 
@async - void startVideoRecording(int cameraId, bool enableStream); + void startVideoRecording(int cameraId, String path); /// Stops recording video, and results the path to the resulting file. @async From 92192e65a28c15f8c9f1e2ae33e15f810f307813 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Tue, 10 Jun 2025 09:52:30 -0400 Subject: [PATCH 14/21] Update pubspec.yaml and camera.cpp: Upgrade camera_linux and camera_avfoundation dependencies to camera_0.5 and modify exposure and focus mode settings to use TrySetValue for better error handling. --- packages/camera/camera/pubspec.yaml | 2 +- packages/camera/camera_linux/linux/camera.cpp | 17 +++++++++++------ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 20d57820ff83..5ec4822e2eee 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: git: url: git@github.com:LightX-Innovations/flutter_packages.git path: packages/camera/camera_linux - ref: camera_0.4 + ref: camera_0.5 camera_web: ^0.3.3 flutter: sdk: flutter diff --git a/packages/camera/camera_linux/linux/camera.cpp b/packages/camera/camera_linux/linux/camera.cpp index ef160c2044fb..f40dc5e5c8ef 100644 --- a/packages/camera/camera_linux/linux/camera.cpp +++ b/packages/camera/camera_linux/linux/camera.cpp @@ -196,14 +196,16 @@ void Camera::setExposureMode(CameraLinuxPlatformExposureMode mode) { switch (mode) { case CameraLinuxPlatformExposureMode:: CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_AUTO: - Pylon::CEnumParameter(nodemap, "ExposureAuto").SetValue("Continuous"); + Pylon::CEnumParameter(nodemap, "ExposureAuto") + .TrySetValue("Continuous"); break; case CameraLinuxPlatformExposureMode:: CAMERA_LINUX_PLATFORM_EXPOSURE_MODE_LOCKED: - Pylon::CEnumParameter(nodemap, "ExposureAuto").SetValue("Off"); + Pylon::CEnumParameter(nodemap, "ExposureAuto").TrySetValue("Off"); break; default: - 
Pylon::CEnumParameter(nodemap, "ExposureAuto").SetValue("Continuous"); + Pylon::CEnumParameter(nodemap, "ExposureAuto") + .TrySetValue("Continuous"); break; } exposure_mode = mode; @@ -216,14 +218,17 @@ void Camera::setFocusMode(CameraLinuxPlatformFocusMode mode) { GenApi::INodeMap& nodemap = camera->GetNodeMap(); switch (mode) { case CameraLinuxPlatformFocusMode::CAMERA_LINUX_PLATFORM_FOCUS_MODE_AUTO: - Pylon::CEnumParameter(nodemap, "FocusMode").SetValue("Auto"); + Pylon::CEnumParameter(nodemap, "FocusAuto") + .TrySetValue("FocusAuto_Continuous"); break; case CameraLinuxPlatformFocusMode:: CAMERA_LINUX_PLATFORM_FOCUS_MODE_LOCKED: - Pylon::CEnumParameter(nodemap, "FocusMode").SetValue("Locked"); + Pylon::CEnumParameter(nodemap, "FocusAuto") + .TrySetValue("FocusAuto_Off"); break; default: - Pylon::CEnumParameter(nodemap, "FocusMode").SetValue("Auto"); + Pylon::CEnumParameter(nodemap, "FocusAuto") + .TrySetValue("FocusAuto_Continuous"); break; } focus_mode = mode; From fafa2ecae6c480fb4441c4b3878062e2b69d847e Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Thu, 12 Jun 2025 09:55:29 -0400 Subject: [PATCH 15/21] Fix image format handling in CameraLinux: Add support for unknown format in initializeCamera method. 
--- packages/camera/camera_linux/lib/src/linux_camera.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera_linux/lib/src/linux_camera.dart b/packages/camera/camera_linux/lib/src/linux_camera.dart index 2b6161a6e4dd..eace45141273 100644 --- a/packages/camera/camera_linux/lib/src/linux_camera.dart +++ b/packages/camera/camera_linux/lib/src/linux_camera.dart @@ -116,9 +116,9 @@ class CameraLinux extends CameraPlatform { PlatformImageFormatGroup imageFormat = PlatformImageFormatGroup.rgb8; switch (imageFormatGroup) { case ImageFormatGroup.jpeg: + case ImageFormatGroup.unknown: imageFormat = PlatformImageFormatGroup.rgb8; break; - case ImageFormatGroup.unknown: case ImageFormatGroup.yuv420: case ImageFormatGroup.nv21: case ImageFormatGroup.bgra8888: From 46e85ee28eb604645def6b0089c65119655cf9e2 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Thu, 12 Jun 2025 09:55:53 -0400 Subject: [PATCH 16/21] Update pubspec.yaml: Upgrade camera_avfoundation and camera_linux dependencies to camera_0.6 --- packages/camera/camera/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 5ec4822e2eee..26673869459a 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: git: url: git@github.com:LightX-Innovations/flutter_packages.git path: packages/camera/camera_linux - ref: camera_0.5 + ref: camera_0.6 camera_web: ^0.3.3 flutter: sdk: flutter From 370723d6ede045c15c0cf0439609841eca8f9701 Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau Date: Thu, 24 Jul 2025 13:36:09 -0400 Subject: [PATCH 17/21] try grab image --- .../camera/camera_linux/linux/CMakeLists.txt | 4 +- packages/camera/camera_linux/linux/camera.cpp | 179 +++++- packages/camera/camera_linux/linux/camera.h | 51 +- .../camera_linux/linux/camera_host_plugin.h | 1 - .../camera_texture_image_event_handler.cpp | 237 
------- .../camera_texture_image_event_handler.h | 63 -- .../camera_linux/linux/capture_pipeline.cpp | 579 ++++++++++++++++++ .../camera_linux/linux/capture_pipeline.h | 82 +++ .../camera_linux/linux/dma_buffer_factory.cpp | 69 --- .../camera_linux/linux/dma_buffer_factory.h | 55 -- .../linux/fl_lightx_texture_gl.cpp | 32 + .../camera_linux/linux/fl_lightx_texture_gl.h | 22 + 12 files changed, 905 insertions(+), 469 deletions(-) delete mode 100644 packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp delete mode 100644 packages/camera/camera_linux/linux/camera_texture_image_event_handler.h create mode 100644 packages/camera/camera_linux/linux/capture_pipeline.cpp create mode 100644 packages/camera/camera_linux/linux/capture_pipeline.h delete mode 100644 packages/camera/camera_linux/linux/dma_buffer_factory.cpp delete mode 100644 packages/camera/camera_linux/linux/dma_buffer_factory.h create mode 100644 packages/camera/camera_linux/linux/fl_lightx_texture_gl.cpp create mode 100644 packages/camera/camera_linux/linux/fl_lightx_texture_gl.h diff --git a/packages/camera/camera_linux/linux/CMakeLists.txt b/packages/camera/camera_linux/linux/CMakeLists.txt index 80811bca1f0a..3346180b9ecd 100644 --- a/packages/camera/camera_linux/linux/CMakeLists.txt +++ b/packages/camera/camera_linux/linux/CMakeLists.txt @@ -70,9 +70,11 @@ set(PLUGIN_NAME "camera_linux_plugin") list(APPEND PLUGIN_SOURCES "camera_plugin.cpp" "camera_host_plugin.cpp" - "camera_texture_image_event_handler.cpp" + "camera_video_recorder_image_event_handler.cpp" "camera.cpp" + "capture_pipeline.cpp" + "fl_lightx_texture_gl.cpp" "messages.g.cc" ) diff --git a/packages/camera/camera_linux/linux/camera.cpp b/packages/camera/camera_linux/linux/camera.cpp index f40dc5e5c8ef..74fb05e30e3c 100644 --- a/packages/camera/camera_linux/linux/camera.cpp +++ b/packages/camera/camera_linux/linux/camera.cpp @@ -1,8 +1,9 @@ #include "camera.h" #include +#include -#include 
"camera_texture_image_event_handler.h" +#include "capture_pipeline.h" Camera::Camera(Pylon::IPylonDevice* device, int64_t camera_id, FlPluginRegistrar* registrar, @@ -27,8 +28,7 @@ Camera::Camera(Pylon::IPylonDevice* device, int64_t camera_id, } Camera::~Camera() { - if (cameraTextureImageEventHandler && camera) - camera->DeregisterImageEventHandler(cameraTextureImageEventHandler.get()); + if (capturePipeline && camera) camera->StopGrabbing(); if (camera) { if (camera->IsGrabbing()) camera->StopGrabbing(); if (camera->IsOpen()) camera->Close(); @@ -39,8 +39,11 @@ Camera::~Camera() { void Camera::initialize(CameraLinuxPlatformImageFormatGroup imageFormat) { imageFormatGroup = imageFormat; - cameraTextureImageEventHandler = - std::make_unique(*this, registrar); + capturePipeline = std::make_unique(*this, registrar); + if (camera->IsOpen()) { + camera->Close(); + } + camera->Open(); GenApi::INodeMap& nodemap = camera->GetNodeMap(); Pylon::CEnumParameter(nodemap, "DeviceLinkThroughputLimitMode") @@ -50,26 +53,20 @@ void Camera::initialize(CameraLinuxPlatformImageFormatGroup imageFormat) { Pylon::CFloatParameter(nodemap, "AcquisitionFrameRate").TrySetValue(60.0); Pylon::CFloatParameter(nodemap, "ResultingFrameRate").TrySetValue(60.0); setImageFormatGroup(imageFormat); - Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off"); Pylon::CIntegerParameter(nodemap, "Width").TrySetValue(width); Pylon::CIntegerParameter(nodemap, "Height").TrySetValue(height); Pylon::CIntegerParameter(nodemap, "OffsetX").TrySetValue(0); Pylon::CIntegerParameter(nodemap, "OffsetY").TrySetValue(0); - Pylon::CStringParameter(nodemap, "ExposureAuto").TrySetValue("Continuous"); + Pylon::CStringParameter(nodemap, "ExposureAuto").TrySetValue("Off"); Pylon::CBooleanParameter(nodemap, "ReverseY").TrySetValue(true); Pylon::CBooleanParameter(nodemap, "AutoFunctionROIUseBrightness") - .TrySetValue(true); + .TrySetValue(false); Pylon::CBooleanParameter(nodemap, "AutoFunctionROIUseWhiteBalance") - 
.TrySetValue(true); + .TrySetValue(false); Pylon::CEnumParameter(nodemap, "BslDefectPixelCorrectionMode") .TrySetValue("On"); - camera->RegisterImageEventHandler(cameraTextureImageEventHandler.get(), - Pylon::RegistrationMode_Append, - Pylon::Cleanup_None); - camera->StartGrabbing(Pylon::GrabStrategy_LatestImages, - Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); - + capturePipeline->StartGrabbing(); emitState(); } @@ -92,8 +89,8 @@ void Camera::setImageFormatGroup( } int64_t Camera::getTextureId() { - if (!cameraTextureImageEventHandler) return -1; - return cameraTextureImageEventHandler->get_texture_id(); + if (!capturePipeline) return -1; + return capturePipeline->get_texture_id(); } void Camera::takePicture(std::string filePath) { @@ -151,6 +148,154 @@ void Camera::emitTextureId(int64_t textureId) const { camera_linux_camera_event_api_initialized_callback, nullptr); } +// void Camera::startGrabbing() { +// GenApi::INodeMap& nodemap = camera->GetNodeMap(); +// Pylon::CEnumParameter(nodemap, "TriggerSelector").SetValue("FrameStart"); +// Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On"); +// Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software"); + +// // Manual grab loop with exposure bracketing +// cameraTextureImageEventHandler->OnImageEventHandlerRegistered(*camera); + +// camera->StartGrabbing(Pylon::GrabStrategy_OneByOne, +// Pylon::EGrabLoop::GrabLoop_ProvidedByUser); + +// std::thread([this]() { +// double shortExposure = 1000.0; // µs - initial value +// // double longExposure = 128000.0; // µs +// // const double gain = 0.6; +// // const double targetBrightness = 120.0; // target average +// // brightness + +// // const double overblownTargetRatio = 0.01; // 3% +// // const double overblownThreshold = 240.0; + +// auto& nodemap = camera->GetNodeMap(); +// // const double minExposure = +// // Pylon::CFloatParameter(nodemap, "ExposureTime").GetMin(); +// // const double maxExposure = +// // Pylon::CFloatParameter(nodemap, 
"ExposureTime").GetMax(); + +// while (camera->IsGrabbing()) { +// // --- Short exposure --- +// Pylon::CFloatParameter(nodemap, "ExposureTime") +// .TrySetValue(shortExposure); +// camera->WaitForFrameTriggerReady(5000, +// Pylon::TimeoutHandling_ThrowException); +// camera->ExecuteSoftwareTrigger(); + +// Pylon::CGrabResultPtr shortResult; +// camera->RetrieveResult(5000, shortResult, +// Pylon::TimeoutHandling_ThrowException); + +// // if (shortResult && shortResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // shortResult); + +// // // === Adjust short exposure for overblown % === +// // const int width = shortResult->GetWidth(); +// // const int height = shortResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(shortResult->GetBuffer()); + +// // const int cx = width / 2; +// // const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // size_t overblown = 0; +// // size_t total = 0; + +// // for (int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// // double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance >= overblownThreshold) { +// // overblown++; +// // } +// // total++; +// // } +// // } +// // } + +// // if (total > 0) { +// // double ratio = static_cast(overblown) / total; +// // double error = overblownTargetRatio - ratio; + +// // // Adjust short exposure proportionally +// // double proposed = +// // shortExposure * (1.0 + gain * error / +// // overblownTargetRatio); +// // shortExposure = +// // std::max(minExposure, std::min(maxExposure, proposed)); +// // } +// // } + +// // // --- Long exposure --- +// // Pylon::CFloatParameter(nodemap, +// // 
"ExposureTime").TrySetValue(longExposure); +// // camera->WaitForFrameTriggerReady(5000, +// // Pylon::TimeoutHandling_ThrowException); +// // camera->ExecuteSoftwareTrigger(); + +// // Pylon::CGrabResultPtr longResult; +// // camera->RetrieveResult(5000, longResult, +// // Pylon::TimeoutHandling_ThrowException); +// // if (longResult && longResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // longResult); + +// // // === Adjust long exposure brightness as before === +// // const int width = longResult->GetWidth(); +// // const int height = longResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(longResult->GetBuffer()); + +// // const int cx = width / 2; +// // const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // uint64_t sum = 0; +// // size_t count = 0; + +// // for (int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// // double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance > 10 && luminance < 240) { +// // sum += luminance; +// // count++; +// // } +// // } +// // } +// // } + +// // if (count > 0) { +// // double avgBrightness = static_cast(sum) / count; +// // double error = targetBrightness - avgBrightness; +// // double proposed = +// // longExposure * (1.0 + gain * error / targetBrightness); +// // longExposure = std::max(minExposure, std::min(maxExposure, +// // proposed)); +// // } +// // } +// } +// }).detach(); +// } + Camera& Camera::setResolutionPreset( CameraLinuxPlatformResolutionPreset preset) { switch (preset) { diff --git a/packages/camera/camera_linux/linux/camera.h b/packages/camera/camera_linux/linux/camera.h index d2f9f9ffaadb..5bdf143e5cb5 100644 --- 
a/packages/camera/camera_linux/linux/camera.h +++ b/packages/camera/camera_linux/linux/camera.h @@ -5,6 +5,7 @@ #include #include "camera_video_recorder_image_event_handler.h" +#include "capture_pipeline.h" #include "flutter_linux/flutter_linux.h" #include "messages.g.h" @@ -21,8 +22,7 @@ class Camera { public: int64_t camera_id; std::unique_ptr camera; - std::unique_ptr - cameraTextureImageEventHandler; + std::unique_ptr capturePipeline; CameraLinuxCameraEventApi* cameraLinuxCameraEventApi; std::unique_ptr cameraVideoRecorderImageEventHandler; @@ -49,6 +49,13 @@ class Camera { void setExposureMode(CameraLinuxPlatformExposureMode mode); void setFocusMode(CameraLinuxPlatformFocusMode mode); + struct HDRFrame { + std::vector buffer; + int width = 0; + int height = 0; + int exposure = 0; // in microseconds + }; + // State public: CameraLinuxPlatformExposureMode exposure_mode; @@ -67,30 +74,22 @@ class Camera { FlPluginRegistrar* registrar; }; -#define CAMERA_CONFIG_LOCK(code) \ - do { \ - bool wasGrabbing = camera->IsGrabbing(); \ - if (!camera) { \ - std::cerr << "Camera is not initialized." << std::endl; \ - return; \ - } \ - if (wasGrabbing) { \ - camera->StopGrabbing(); \ - camera->DeregisterImageEventHandler( \ - cameraTextureImageEventHandler.get()); \ - cameraTextureImageEventHandler.reset(); \ - } \ - {code}; \ - if (wasGrabbing) { \ - cameraTextureImageEventHandler = \ - std::make_unique(*this, registrar); \ - camera->RegisterImageEventHandler(cameraTextureImageEventHandler.get(), \ - Pylon::RegistrationMode_Append, \ - Pylon::Cleanup_None); \ - camera->StartGrabbing( \ - Pylon::GrabStrategy_LatestImages, \ - Pylon::EGrabLoop::GrabLoop_ProvidedByInstantCamera); \ - } \ +#define CAMERA_CONFIG_LOCK(code) \ + do { \ + bool wasGrabbing = camera->IsGrabbing(); \ + if (!camera) { \ + std::cerr << "Camera is not initialized." 
<< std::endl; \ + return; \ + } \ + if (wasGrabbing) { \ + capturePipeline->StopGrabbing(); \ + capturePipeline.reset(); \ + } \ + {code}; \ + if (wasGrabbing) { \ + capturePipeline = std::make_unique(*this, registrar); \ + capturePipeline->StartGrabbing(); \ + } \ } while (0) #endif // CAMERA_H_ diff --git a/packages/camera/camera_linux/linux/camera_host_plugin.h b/packages/camera/camera_linux/linux/camera_host_plugin.h index e059dc12284a..0f6294cd7ff2 100644 --- a/packages/camera/camera_linux/linux/camera_host_plugin.h +++ b/packages/camera/camera_linux/linux/camera_host_plugin.h @@ -4,7 +4,6 @@ #include -#include "camera_texture_image_event_handler.h" #include "flutter_linux/flutter_linux.h" #include "messages.g.h" diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp deleted file mode 100644 index 35dd5000a70e..000000000000 --- a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.cpp +++ /dev/null @@ -1,237 +0,0 @@ -#include "camera_texture_image_event_handler.h" - -#include - -#include - -G_DEFINE_TYPE(FlMyTextureGL, fl_my_texture_gl, fl_texture_gl_get_type()) - -static gboolean fl_my_texture_gl_populate(FlTextureGL* texture, - uint32_t* target, uint32_t* name, - uint32_t* width, uint32_t* height, - GError** error) { - FlMyTextureGL* f = (FlMyTextureGL*)texture; - *target = f->target; - *name = f->name; - *width = f->width; - *height = f->height; - return true; -} - -FlMyTextureGL* fl_my_texture_gl_new(uint32_t target, uint32_t name, - uint32_t width, uint32_t height) { - auto r = FL_MY_TEXTURE_GL(g_object_new(fl_my_texture_gl_get_type(), nullptr)); - r->target = target; - r->name = name; - r->width = width; - r->height = height; - return r; -} - -static void fl_my_texture_gl_class_init(FlMyTextureGLClass* klass) { - FL_TEXTURE_GL_CLASS(klass)->populate = fl_my_texture_gl_populate; -} - -static void 
fl_my_texture_gl_init(FlMyTextureGL* self) {} - -CameraTextureImageEventHandler::CameraTextureImageEventHandler( - const Camera& camera, FlPluginRegistrar* registrar) - : camera(camera), - m_registrar(registrar), - m_texture_registrar( - fl_plugin_registrar_get_texture_registrar(registrar)) {} - -CameraTextureImageEventHandler ::~CameraTextureImageEventHandler() { - if (m_texture) { - glDeleteTextures(1, &m_texture_name); - fl_texture_registrar_unregister_texture(m_texture_registrar, - FL_TEXTURE(m_texture)); - g_object_unref(m_texture); - } -} - -int64_t CameraTextureImageEventHandler::get_texture_id() { - if (!m_texture) { - std::cerr << "Texture is null" << std::endl; - return -1; - } - return fl_texture_get_id(FL_TEXTURE(m_texture)); -} - -GLuint compileShader(GLenum type, const char* src) { - GLuint shader = glCreateShader(type); - glShaderSource(shader, 1, &src, nullptr); - glCompileShader(shader); - GLint success; - glGetShaderiv(shader, GL_COMPILE_STATUS, &success); - if (!success) { - char log[512]; - glGetShaderInfoLog(shader, 512, nullptr, log); - std::cerr << "Shader compile error: " << log << std::endl; - } - return shader; -} - -GLuint createShaderProgram() { - const char* vertexSrc = R"( - #version 300 es - precision mediump float; - layout (location = 0) in vec2 position; - layout (location = 1) in vec2 texCoord; - out vec2 TexCoords; - void main() { - TexCoords = texCoord; - gl_Position = vec4(position, 0.0, 1.0); - } - )"; - - const char* fragmentSrc = R"( - #version 300 es - precision mediump float; - in vec2 TexCoords; - out vec4 FragColor; - uniform sampler2D monoTexture; - void main() { - float gray = texture(monoTexture, TexCoords).r; - FragColor = vec4(gray, gray, gray, 1.0); // convert mono to RGB - } - )"; - - GLuint vs = compileShader(GL_VERTEX_SHADER, vertexSrc); - GLuint fs = compileShader(GL_FRAGMENT_SHADER, fragmentSrc); - - GLuint program = glCreateProgram(); - glAttachShader(program, vs); - glAttachShader(program, fs); - 
glLinkProgram(program); - - GLint success; - glGetProgramiv(program, GL_LINK_STATUS, &success); - if (!success) { - char log[512]; - glGetProgramInfoLog(program, 512, nullptr, log); - std::cerr << "Shader program link error: " << log << std::endl; - } - - glDeleteShader(vs); - glDeleteShader(fs); - - return program; -} - -void CameraTextureImageEventHandler::OnImageEventHandlerRegistered( - Pylon::CInstantCamera& _) { - FlView* fl_view = FL_VIEW(fl_plugin_registrar_get_view(m_registrar)); - GdkWindow* window = gtk_widget_get_parent_window(GTK_WIDGET(fl_view)); - m_gl_context = gdk_window_create_gl_context(window, NULL); - gdk_gl_context_make_current(m_gl_context); - - const int width = camera.width; - const int height = camera.height; - - // 1. Create input texture (raw camera frame) - glGenTextures(1, &m_input_texture); - glBindTexture(GL_TEXTURE_2D, m_input_texture); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, - GL_UNSIGNED_BYTE, nullptr); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - - // 2. Create output texture (post-shader result) - glGenTextures(1, &m_output_texture); - glBindTexture(GL_TEXTURE_2D, m_output_texture); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, - GL_UNSIGNED_BYTE, nullptr); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - - // 3. Create framebuffer and attach output texture - glGenFramebuffers(1, &m_fbo); - glBindFramebuffer(GL_FRAMEBUFFER, m_fbo); - glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, - m_output_texture, 0); - - if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { - std::cerr << "Framebuffer not complete!" << std::endl; - } - - // 4. Create shader program - m_shader_program = createShaderProgram(); - - // 5. 
Create fullscreen quad VAO/VBO - float quadVertices[] = { - // pos // tex - -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, -1.0f, 1.0f, 0.0f, - -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, - }; - - glGenVertexArrays(1, &m_vao); - glGenBuffers(1, &m_vbo); - glBindVertexArray(m_vao); - glBindBuffer(GL_ARRAY_BUFFER, m_vbo); - glBufferData(GL_ARRAY_BUFFER, sizeof(quadVertices), quadVertices, - GL_STATIC_DRAW); - glEnableVertexAttribArray(0); - glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0); - glEnableVertexAttribArray(1); - glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), - (void*)(2 * sizeof(float))); - - // 6. Wrap output texture for Flutter - m_texture = - fl_my_texture_gl_new(GL_TEXTURE_2D, m_output_texture, width, height); - fl_texture_registrar_register_texture(m_texture_registrar, - FL_TEXTURE(m_texture)); - fl_texture_registrar_mark_texture_frame_available(m_texture_registrar, - FL_TEXTURE(m_texture)); - camera.emitTextureId(get_texture_id()); -} - -void CameraTextureImageEventHandler::OnImageEventHandlerDeregistered( - Pylon::CInstantCamera& _) { - camera.emitTextureId(-1); -} - -void CameraTextureImageEventHandler::OnImageGrabbed( - Pylon::CInstantCamera& _, const Pylon::CGrabResultPtr& ptr) { - if (!m_texture || !ptr->GrabSucceeded()) { - std::cerr << "Error: Grab failed or texture not ready." 
<< std::endl; - return; - } - - gdk_gl_context_make_current(m_gl_context); - - const int width = ptr->GetWidth(); - const int height = ptr->GetHeight(); - - if (ptr->GetPixelType() == Pylon::PixelType_Mono8) { - // Upload to input texture (single channel) - glBindTexture(GL_TEXTURE_2D, m_input_texture); - - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RED, - GL_UNSIGNED_BYTE, ptr->GetBuffer()); - - // Use shader to render to output texture via FBO - glBindFramebuffer(GL_FRAMEBUFFER, m_fbo); - - glViewport(0, 0, width, height); - glUseProgram(m_shader_program); - - glBindVertexArray(m_vao); - glActiveTexture(GL_TEXTURE0); - glBindTexture(GL_TEXTURE_2D, m_input_texture); - glUniform1i(glGetUniformLocation(m_shader_program, "monoTexture"), 0); - glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); - glBindFramebuffer(GL_FRAMEBUFFER, 0); // unbind FBO - } else { - // RGB format: write directly to output texture - glBindTexture(GL_TEXTURE_2D, m_output_texture); - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGB, - GL_UNSIGNED_BYTE, ptr->GetBuffer()); - } - - glFlush(); - // Mark the output texture as new frame available for Flutter - fl_texture_registrar_mark_texture_frame_available(m_texture_registrar, - FL_TEXTURE(m_texture)); -} diff --git a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h b/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h deleted file mode 100644 index ed6040bb9c15..000000000000 --- a/packages/camera/camera_linux/linux/camera_texture_image_event_handler.h +++ /dev/null @@ -1,63 +0,0 @@ - -#ifndef CAMERA_TEXTURE_IMAGE_EVENT_HANDLER_H_ -#define CAMERA_TEXTURE_IMAGE_EVENT_HANDLER_H_ - -#include - -#include "camera.h" -#include "flutter_linux/flutter_linux.h" -#include "messages.g.h" - -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Woverloaded-virtual" -#pragma clang diagnostic ignored "-Wunused-variable" - -#include - -#pragma clang diagnostic pop - 
-G_DECLARE_FINAL_TYPE(FlMyTextureGL, fl_my_texture_gl, FL, MY_TEXTURE_GL, - FlTextureGL) - -struct _FlMyTextureGL { - FlTextureGL parent_instance; - uint32_t target; - uint32_t name; - uint32_t width; - uint32_t height; -}; - -FlMyTextureGL* fl_my_texture_gl_new(uint32_t target, uint32_t name, - uint32_t width, uint32_t height); - -class CameraTextureImageEventHandler : public Pylon::CImageEventHandler { - FlMyTextureGL* m_texture; - unsigned int m_texture_name; - const Camera& camera; - FlPluginRegistrar* m_registrar; - FlTextureRegistrar* m_texture_registrar; - GdkGLContext* m_gl_context; - - GLuint m_input_texture = 0; - GLuint m_output_texture = 0; - GLuint m_fbo = 0; - GLuint m_shader_program = 0; - GLuint m_vao = 0, m_vbo = 0; - - public: - CameraTextureImageEventHandler(const Camera& camera, - FlPluginRegistrar* registrar); - - ~CameraTextureImageEventHandler() override; - - int64_t get_texture_id(); - - void OnImageEventHandlerRegistered(Pylon::CInstantCamera& camera) override; - - void OnImageGrabbed(Pylon::CInstantCamera& camera, - const Pylon::CGrabResultPtr& ptr) override; - - void OnImageEventHandlerDeregistered(Pylon::CInstantCamera& camera) override; -}; - -#endif // CAMERA_TEXTURE_IMAGE_EVENT_HANDLER_H_ diff --git a/packages/camera/camera_linux/linux/capture_pipeline.cpp b/packages/camera/camera_linux/linux/capture_pipeline.cpp new file mode 100644 index 000000000000..23d09bcaea1c --- /dev/null +++ b/packages/camera/camera_linux/linux/capture_pipeline.cpp @@ -0,0 +1,579 @@ + +#include "capture_pipeline.h" + +#include + +#include + +#include "camera.h" + +CapturePipeline::CapturePipeline(const Camera& camera, + FlPluginRegistrar* registrar) + : camera{camera}, + m_fl_registrar(registrar), + m_fl_texture_registrar( + fl_plugin_registrar_get_texture_registrar(registrar)) {} + +CapturePipeline::~CapturePipeline() { + if (m_fl_texture) { + glDeleteTextures(1, &m_fl_texture_name); + fl_texture_registrar_unregister_texture(m_fl_texture_registrar, + 
FL_TEXTURE(m_fl_texture)); + g_object_unref(m_fl_texture); + } +} + +// GenApi::INodeMap& nodemap = camera->GetNodeMap(); +// Pylon::CEnumParameter(nodemap, "TriggerSelector").SetValue("FrameStart"); +// Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On"); +// Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software"); + +// // Manual grab loop with exposure bracketing +// cameraTextureImageEventHandler->OnImageEventHandlerRegistered(*camera); + +// camera->StartGrabbing(Pylon::GrabStrategy_OneByOne, +// Pylon::EGrabLoop::GrabLoop_ProvidedByUser); + +// std::thread([this]() { +// double shortExposure = 1000.0; // µs - initial value +// // double longExposure = 128000.0; // µs +// // const double gain = 0.6; +// // const double targetBrightness = 120.0; // target average +// // brightness + +// // const double overblownTargetRatio = 0.01; // 3% +// // const double overblownThreshold = 240.0; + +// auto& nodemap = camera->GetNodeMap(); +// // const double minExposure = +// // Pylon::CFloatParameter(nodemap, "ExposureTime").GetMin(); +// // const double maxExposure = +// // Pylon::CFloatParameter(nodemap, "ExposureTime").GetMax(); + +// while (camera->IsGrabbing()) { +// // --- Short exposure --- +// Pylon::CFloatParameter(nodemap, "ExposureTime") +// .TrySetValue(shortExposure); +// camera->WaitForFrameTriggerReady(5000, +// Pylon::TimeoutHandling_ThrowException); +// camera->ExecuteSoftwareTrigger(); + +// Pylon::CGrabResultPtr shortResult; +// camera->RetrieveResult(5000, shortResult, +// Pylon::TimeoutHandling_ThrowException); + +// // if (shortResult && shortResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // shortResult); + +// // // === Adjust short exposure for overblown % === +// // const int width = shortResult->GetWidth(); +// // const int height = shortResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(shortResult->GetBuffer()); + +// // const int cx = width / 2; +// // 
const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // size_t overblown = 0; +// // size_t total = 0; + +// // for (int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// // double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance >= overblownThreshold) { +// // overblown++; +// // } +// // total++; +// // } +// // } +// // } + +// // if (total > 0) { +// // double ratio = static_cast(overblown) / total; +// // double error = overblownTargetRatio - ratio; + +// // // Adjust short exposure proportionally +// // double proposed = +// // shortExposure * (1.0 + gain * error / +// // overblownTargetRatio); +// // shortExposure = +// // std::max(minExposure, std::min(maxExposure, proposed)); +// // } +// // } + +// // // --- Long exposure --- +// // Pylon::CFloatParameter(nodemap, +// // "ExposureTime").TrySetValue(longExposure); +// // camera->WaitForFrameTriggerReady(5000, +// // Pylon::TimeoutHandling_ThrowException); +// // camera->ExecuteSoftwareTrigger(); + +// // Pylon::CGrabResultPtr longResult; +// // camera->RetrieveResult(5000, longResult, +// // Pylon::TimeoutHandling_ThrowException); +// // if (longResult && longResult->GrabSucceeded()) { +// // cameraTextureImageEventHandler->OnImageGrabbed(*camera, +// // longResult); + +// // // === Adjust long exposure brightness as before === +// // const int width = longResult->GetWidth(); +// // const int height = longResult->GetHeight(); +// // const uint8_t* buffer = +// // static_cast(longResult->GetBuffer()); + +// // const int cx = width / 2; +// // const int cy = height / 2; +// // const int radius = std::min(width, height) / 4; + +// // uint64_t sum = 0; +// // size_t count = 0; + +// // for 
(int y = 0; y < height; ++y) { +// // for (int x = 0; x < width; ++x) { +// // int dx = x - cx; +// // int dy = y - cy; +// // if (dx * dx + dy * dy <= radius * radius) { +// // int index = (y * width + x) * 3; +// // uint8_t r = buffer[index]; +// // uint8_t g = buffer[index + 1]; +// // uint8_t b = buffer[index + 2]; +// // double luminance = 0.299 * r + 0.587 * g + 0.114 * b; + +// // if (luminance > 10 && luminance < 240) { +// // sum += luminance; +// // count++; +// // } +// // } +// // } +// // } + +// // if (count > 0) { +// // double avgBrightness = static_cast(sum) / count; +// // double error = targetBrightness - avgBrightness; +// // double proposed = +// // longExposure * (1.0 + gain * error / targetBrightness); +// // longExposure = std::max(minExposure, std::min(maxExposure, +// // proposed)); +// // } +// // } +// } +// }).detach(); +// } + +void CapturePipeline::StartGrabbing() { + if (!camera.camera) { + std::cerr << "Camera is not initialized." << std::endl; + return; + } + GenApi::INodeMap& nodemap = camera.camera->GetNodeMap(); + Pylon::CEnumParameter(nodemap, "TriggerSelector").SetValue("FrameStart"); + Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On"); + Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software"); + + camera.camera->StartGrabbing(Pylon::GrabStrategy_OneByOne, + Pylon::EGrabLoop::GrabLoop_ProvidedByUser); + + std::cout << "Starting camera grabbing..." 
<< std::endl; + + std::thread([this]() { + GLInit(); + notifyTextureReady(); + + std::vector exposureLevels = {2000.0, 16000.0}; + size_t exposureIndex = 0; + GenApi::INodeMap& nodemap = camera.camera->GetNodeMap(); + + while (camera.camera->IsGrabbing()) { + // Set new exposure + double exposure = exposureLevels[exposureIndex]; + exposureIndex = (exposureIndex + 1) % exposureLevels.size(); + Pylon::CFloatParameter(nodemap, "ExposureTime").TrySetValue(exposure); + std::cout << "Set exposure to: " << exposure << "us" << std::endl; + + camera.camera->WaitForFrameTriggerReady(5000, + Pylon::TimeoutHandling_Return); + camera.camera->ExecuteSoftwareTrigger(); + Pylon::CGrabResultPtr grabResult; + if (!camera.camera->RetrieveResult(5000, grabResult, + Pylon::TimeoutHandling_Return)) { + continue; + } + std::cout << "image grabbed" << std::endl; + + if (!grabResult->GrabSucceeded()) { + std::cerr << "Error grabbing image: " + << grabResult->GetErrorDescription() << std::endl; + continue; + } + OnImageGrabbed(grabResult); + std::cout << "finish processing frame" << std::endl; + } + }).detach(); +} + +void CapturePipeline::notifyTextureReady() { + // Pass 'this' pointer to main thread callback + g_idle_add( + [](void* data) -> gboolean { + CapturePipeline* self = static_cast(data); + std::cout << "Texture is ready" << std::endl; + self->camera.emitTextureId(self->get_texture_id()); + return G_SOURCE_REMOVE; // remove source after running once + }, + this); +} + +void CapturePipeline::GLInit() { + FlView* fl_view = FL_VIEW(fl_plugin_registrar_get_view(m_fl_registrar)); + GdkWindow* window = gtk_widget_get_parent_window(GTK_WIDGET(fl_view)); + m_gl_context = gdk_window_create_gl_context(window, NULL); + gdk_gl_context_make_current(m_gl_context); + std::cout << "[DEBUG] Created and made current GL context." 
<< std::endl; + + const int width = camera.width; + const int height = camera.height; + std::cout << "[DEBUG] Camera resolution: " << width << "x" << height + << std::endl; + + // 1. Create PBO ring buffer + m_ring_buffer_index = 0; + glGenBuffers(RING_BUFFER_SIZE, m_pbo_ring_buffer); + for (size_t i = 0; i < RING_BUFFER_SIZE; ++i) { + glBindBuffer(GL_PIXEL_PACK_BUFFER, m_pbo_ring_buffer[i]); + glBufferData(GL_PIXEL_PACK_BUFFER, width * height * 3, nullptr, + GL_STREAM_READ); + std::cout << "[DEBUG] Created PBO buffer ID: " << m_pbo_ring_buffer[i] + << std::endl; + } + glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); + + glGenTextures(RING_BUFFER_SIZE, m_exposure_textures); + for (int i = 0; i < RING_BUFFER_SIZE; ++i) { + glBindTexture(GL_TEXTURE_2D, m_exposure_textures[i]); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + } + glBindTexture(GL_TEXTURE_2D, 0); + + // 2. Create Motion Mask Texture + // glGenTextures(1, &m_motion_mask_texture); + // glBindTexture(GL_TEXTURE_2D, m_motion_mask_texture); + // glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, width, height, 0, GL_RED, + // GL_UNSIGNED_BYTE, nullptr); + // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + // std::cout << "[DEBUG] Created motion mask texture ID: " + // << m_motion_mask_texture << std::endl; + + // 3. 
Create HDR Fusion Shader Program + m_hdr_fusion_shader_program = createHDRShaderProgram(); + std::cout << "[DEBUG] Created HDR fusion shader program ID: " + << m_hdr_fusion_shader_program << std::endl; + + float quadVertices[] = { + // pos // tex + -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, -1.0f, 1.0f, 0.0f, + -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, + }; + + glGenVertexArrays(1, &m_hdr_fusion_vao); + glGenFramebuffers(1, &m_hdr_fusion_fbo); + glBindVertexArray(m_hdr_fusion_vao); + glBindBuffer(GL_ARRAY_BUFFER, m_hdr_fusion_vbo); + glBufferData(GL_ARRAY_BUFFER, sizeof(quadVertices), quadVertices, + GL_STATIC_DRAW); + + glEnableVertexAttribArray(0); + glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0); + glEnableVertexAttribArray(1); + glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), + (void*)(2 * sizeof(float))); + + glBindVertexArray(0); + std::cout << "[DEBUG] Created HDR fusion VAO: " << m_hdr_fusion_vao + << ", VBO: " << m_hdr_fusion_vbo << std::endl; + + // 4. Create Tone Mapping Shader Program + // TODO: Add debug print here when implemented + + // 5. 
Create Mono Shader Program + // m_mono_shader_program = createMonoShaderProgram(); + // std::cout << "[DEBUG] Created Mono shader program ID: " + // << m_mono_shader_program << std::endl; + + // float quadVerticesMono[] = { + // // pos // tex + // -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, -1.0f, 1.0f, 0.0f, + // -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, + // }; + + // glGenVertexArrays(1, &m_mono_vao); + // glGenBuffers(1, &m_mono_vbo); + // glBindVertexArray(m_mono_vao); + // glBindBuffer(GL_ARRAY_BUFFER, m_mono_vbo); + // glBufferData(GL_ARRAY_BUFFER, sizeof(quadVerticesMono), quadVerticesMono, + // GL_STATIC_DRAW); + + // glEnableVertexAttribArray(0); + // glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), + // (void*)0); glEnableVertexAttribArray(1); glVertexAttribPointer(1, 2, + // GL_FLOAT, GL_FALSE, 4 * sizeof(float), + // (void*)(2 * sizeof(float))); + + // glBindVertexArray(0); + // std::cout << "[DEBUG] Created Mono VAO: " << m_mono_vao + // << ", VBO: " << m_mono_vbo << std::endl; + + // 6. Create Output Texture + glGenTextures(1, &m_output_texture); + glBindTexture(GL_TEXTURE_2D, m_output_texture); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, + GL_UNSIGNED_BYTE, nullptr); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + std::cout << "[DEBUG] Created output texture ID: " << m_output_texture + << std::endl; + + // 7. 
Wrap output texture for Flutter + m_fl_texture = + fl_lightx_texture_gl_new(GL_TEXTURE_2D, m_output_texture, width, height); + m_fl_texture_name = m_output_texture; + fl_texture_registrar_register_texture(m_fl_texture_registrar, + FL_TEXTURE(m_fl_texture)); + fl_texture_registrar_mark_texture_frame_available(m_fl_texture_registrar, + FL_TEXTURE(m_fl_texture)); + std::cout << "[DEBUG] Registered and marked Flutter texture frame available " + "for texture ID: " + << m_output_texture << std::endl; +} + +void CapturePipeline::StopGrabbing() {} +void CapturePipeline::OnImageGrabbed(const Pylon::CGrabResultPtr& grabResult) { + if (!grabResult || !grabResult->GrabSucceeded()) { + std::cerr << "[DEBUG] Error grabbing image: " + << (grabResult ? grabResult->GetErrorDescription() : "No result") + << std::endl; + return; + } + + const int width = grabResult->GetWidth(); + const int height = grabResult->GetHeight(); + const uint8_t* data = static_cast(grabResult->GetBuffer()); + if (!data) { + std::cerr << "[DEBUG] No image data available." 
<< std::endl; + return; + } + + gdk_gl_context_make_current(m_gl_context); + + const int bufferIndex = m_ring_buffer_index; + const int nextIndex = (m_ring_buffer_index + 1) % RING_BUFFER_SIZE; + + GLuint pbo = m_pbo_ring_buffer[bufferIndex]; + GLuint texture = m_exposure_textures[bufferIndex]; + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo); + glBufferData(GL_PIXEL_UNPACK_BUFFER, width * height * 3, nullptr, + GL_STREAM_DRAW); + + void* ptr = glMapBufferRange(GL_PIXEL_UNPACK_BUFFER, 0, width * height * 3, + GL_MAP_WRITE_BIT | GL_MAP_INVALIDATE_BUFFER_BIT); + if (ptr) { + std::memcpy(ptr, data, width * height * 3); + glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER); + } else { + std::cerr << "[ERROR] Failed to map PBO" << std::endl; + } + + // Upload from PBO to texture (allocated only once elsewhere) + glBindTexture(GL_TEXTURE_2D, texture); + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGB, + GL_UNSIGNED_BYTE, nullptr); + glBindTexture(GL_TEXTURE_2D, 0); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0); + + std::cout << "[DEBUG] Uploaded image data to texture index " << bufferIndex + << std::endl; + + m_ring_buffer_index = nextIndex; + + // --- HDR Shader Pass --- + glBindFramebuffer(GL_FRAMEBUFFER, m_hdr_fusion_fbo); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, + m_output_texture, 0); + + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + std::cerr << "[ERROR] Framebuffer not complete." 
<< std::endl; + glBindFramebuffer(GL_FRAMEBUFFER, 0); + return; + } + + glViewport(0, 0, width, height); + glUseProgram(m_hdr_fusion_shader_program); + + const char* uniformNames[] = {"texLow", "texMidLow"}; + for (int i = 0; i < 2; ++i) { + glActiveTexture(GL_TEXTURE0 + i); + glBindTexture(GL_TEXTURE_2D, m_exposure_textures[i]); + GLint loc = + glGetUniformLocation(m_hdr_fusion_shader_program, uniformNames[i]); + glUniform1i(loc, i); + } + + glBindVertexArray(m_hdr_fusion_vao); + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + // Cleanup + glBindVertexArray(0); + for (int i = 0; i < 2; ++i) { + glActiveTexture(GL_TEXTURE0 + i); + glBindTexture(GL_TEXTURE_2D, 0); + } + glBindFramebuffer(GL_FRAMEBUFFER, 0); + glUseProgram(0); + + // Notify Flutter + glFlush(); + fl_texture_registrar_mark_texture_frame_available(m_fl_texture_registrar, + FL_TEXTURE(m_fl_texture)); +} + +int64_t CapturePipeline::get_texture_id() { + if (!m_fl_texture) { + std::cerr << "Texture is null" << std::endl; + return -1; + } + return fl_texture_get_id(FL_TEXTURE(m_fl_texture)); +} + +void CapturePipeline::OnNewFrame() {} + +GLuint CapturePipeline::compileShader(GLenum type, const char* src) { + GLuint shader = glCreateShader(type); + glShaderSource(shader, 1, &src, nullptr); + glCompileShader(shader); + GLint success; + glGetShaderiv(shader, GL_COMPILE_STATUS, &success); + if (!success) { + char log[512]; + glGetShaderInfoLog(shader, 512, nullptr, log); + std::cerr << "Shader compile error: " << log << std::endl; + } + return shader; +} + +GLuint CapturePipeline::createMonoShaderProgram() { + const char* vertexSrc = R"( + #version 300 es + precision mediump float; + layout (location = 0) in vec2 position; + layout (location = 1) in vec2 texCoord; + out vec2 TexCoords; + void main() { + TexCoords = texCoord; + gl_Position = vec4(position, 0.0, 1.0); + } + )"; + + const char* fragmentSrc = R"( + #version 300 es + precision mediump float; + in vec2 TexCoords; + out vec4 FragColor; + uniform sampler2D 
monoTexture; + void main() { + float gray = texture(monoTexture, TexCoords).r; + FragColor = vec4(gray, gray, gray, 1.0); // convert mono to RGB + } + )"; + + GLuint vs = compileShader(GL_VERTEX_SHADER, vertexSrc); + GLuint fs = compileShader(GL_FRAGMENT_SHADER, fragmentSrc); + + GLuint program = glCreateProgram(); + glAttachShader(program, vs); + glAttachShader(program, fs); + glLinkProgram(program); + + GLint success; + glGetProgramiv(program, GL_LINK_STATUS, &success); + if (!success) { + char log[512]; + glGetProgramInfoLog(program, 512, nullptr, log); + std::cerr << "Shader program link error: " << log << std::endl; + } + + glDeleteShader(vs); + glDeleteShader(fs); + + return program; +} + +GLuint CapturePipeline::createHDRShaderProgram() { + const char* vertexSrc = R"( + #version 300 es + precision mediump float; + layout (location = 0) in vec2 position; + layout (location = 1) in vec2 texCoord; + out vec2 TexCoords; + void main() { + TexCoords = texCoord; + gl_Position = vec4(position, 0.0, 1.0); + } + )"; + + const char* fragmentSrc = R"( + #version 300 es + precision mediump float; + in vec2 TexCoords; + out vec4 FragColor; + + uniform sampler2D texLow; + uniform sampler2D texMidLow; + + void main() { + vec3 colorLow = texture(texLow, TexCoords).rgb; + vec3 colorMidLow = texture(texMidLow, TexCoords).rgb; + + // Simple exposure fusion strategy: weighted average (weights can be adjusted) + float w1 = 0.2; + float w2 = 0.8; + + vec3 hdr = (colorLow * w1 + colorMidLow * w2) / (w1 + w2); + FragColor = vec4(hdr, 1.0); + } + )"; + + GLuint vs = compileShader(GL_VERTEX_SHADER, vertexSrc); + GLuint fs = compileShader(GL_FRAGMENT_SHADER, fragmentSrc); + + GLuint program = glCreateProgram(); + glAttachShader(program, vs); + glAttachShader(program, fs); + glLinkProgram(program); + + GLint success; + glGetProgramiv(program, GL_LINK_STATUS, &success); + if (!success) { + char log[512]; + glGetProgramInfoLog(program, 512, nullptr, log); + std::cerr << "Shader program 
link error: " << log << std::endl; + } + + glDeleteShader(vs); + glDeleteShader(fs); + + return program; +} diff --git a/packages/camera/camera_linux/linux/capture_pipeline.h b/packages/camera/camera_linux/linux/capture_pipeline.h new file mode 100644 index 000000000000..07fcef61d642 --- /dev/null +++ b/packages/camera/camera_linux/linux/capture_pipeline.h @@ -0,0 +1,82 @@ + +#ifndef CAPTURE_PIPELINE_H_ +#define CAPTURE_PIPELINE_H_ + +#include + +#include + +#include "fl_lightx_texture_gl.h" +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Woverloaded-virtual" +#pragma clang diagnostic ignored "-Wunused-variable" + +#include + +#pragma clang diagnostic pop + +#include +#include + +#define RING_BUFFER_SIZE 2 + +class Camera; + +class CapturePipeline { + public: + CapturePipeline(const Camera& camera, FlPluginRegistrar* registrar); + ~CapturePipeline(); + + void StartGrabbing(); + void StopGrabbing(); + + int64_t get_texture_id(); + + private: + const Camera& camera; + + // FL Texture + FlLightxTextureGL* m_fl_texture; + unsigned int m_fl_texture_name; + FlPluginRegistrar* m_fl_registrar; + FlTextureRegistrar* m_fl_texture_registrar; + GdkGLContext* m_gl_context; + + // OpenGL resources + GLuint m_pbo_ring_buffer[RING_BUFFER_SIZE]; + GLuint m_exposure_textures[RING_BUFFER_SIZE] = {0}; + size_t m_ring_buffer_index; + + // motion mask texture + // GLuint m_motion_mask_texture; + + // hdr fusion GPU shader pass + GLuint m_hdr_fusion_shader_program; + GLuint m_hdr_fusion_vao, m_hdr_fusion_vbo; + GLuint m_hdr_fusion_fbo; + + // tone mapping GPU shader pass + // GLuint m_tone_mapping_shader_program; + // GLuint m_tone_mapping_vao, m_tone_mapping_vbo; + // GLuint m_tone_mapping_fbo; + + // mono texture GPU shader pass + // GLuint m_mono_shader_program; + // GLuint m_mono_fbo; + + // output texture + GLuint m_output_texture; + + void OnImageGrabbed(const Pylon::CGrabResultPtr& grabResult); 
+ void GLInit(); + void OnNewFrame(); + GLuint compileShader(GLenum type, const char* src); + GLuint createMonoShaderProgram(); + GLuint createHDRShaderProgram(); + void notifyTextureReady(); +}; + +#endif // CAPTURE_PIPELINE_H_ diff --git a/packages/camera/camera_linux/linux/dma_buffer_factory.cpp b/packages/camera/camera_linux/linux/dma_buffer_factory.cpp deleted file mode 100644 index e57be31014c2..000000000000 --- a/packages/camera/camera_linux/linux/dma_buffer_factory.cpp +++ /dev/null @@ -1,69 +0,0 @@ -#include "dma_buffer_factory.h" - -DMABufferFactory::DMABufferFactory() { - m_gbmDevice = gbm_create_device(open("/dev/dri/renderD128", O_RDWR)); - if (!m_gbmDevice) { - throw std::runtime_error("Failed to create GBM device"); - } -} - -DMABufferFactory ::~DMABufferFactory() { - for (auto& pair : buffers) { - FreeBuffer(pair.second.mappedAddress, pair.second.context); - } - if (m_gbmDevice) { - close(gbm_device_get_fd(m_gbmDevice)); - gbm_device_destroy(m_gbmDevice); - } -} - -void DMABufferFactory::AllocateBuffer(size_t bufferSize, void** pCreatedBuffer, - intptr_t& bufferContext) { - const int width = 3840; - const int height = 2160; - const int format = GBM_FORMAT_XRGB8888; - - gbm_bo* bo = gbm_bo_create(m_gbmDevice, width, height, format, - GBM_BO_USE_LINEAR | GBM_BO_USE_RENDERING); - - if (!bo) { - throw std::runtime_error("Failed to allocate GBM buffer"); - } - - void* map_data = nullptr; - uint32_t stride; - void* addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_WRITE, - &stride, &map_data); - if (!addr) { - gbm_bo_destroy(bo); - throw std::runtime_error("Failed to map GBM buffer"); - } - - intptr_t ctx = reinterpret_cast(map_data); - - BufferInfo info = {bo, addr, ctx}; - buffers[addr] = info; - - *pCreatedBuffer = addr; - bufferContext = ctx; -} - -void DMABufferFactory::FreeBuffer(void* pCreatedBuffer, - intptr_t bufferContext) { - auto it = buffers.find(pCreatedBuffer); - if (it != buffers.end()) { - gbm_bo_unmap(it->second.bo, 
(void*)bufferContext); - gbm_bo_destroy(it->second.bo); - buffers.erase(it); - } -} - -void DMABufferFactory::DestroyBufferFactory() { delete this; } - -gbm_bo* DMABufferFactory::get_bo(void* pCreatedBuffer) { - auto it = buffers.find(pCreatedBuffer); - if (it != buffers.end()) { - return it->second.bo; - } - return nullptr; -} diff --git a/packages/camera/camera_linux/linux/dma_buffer_factory.h b/packages/camera/camera_linux/linux/dma_buffer_factory.h deleted file mode 100644 index 7e5e5dbca52e..000000000000 --- a/packages/camera/camera_linux/linux/dma_buffer_factory.h +++ /dev/null @@ -1,55 +0,0 @@ - -#ifndef DMA_BUFFER_FACTORY_H_ -#define DMA_BUFFER_FACTORY_H_ - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -#include "flutter_linux/flutter_linux.h" -#include "messages.g.h" - -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Woverloaded-virtual" -#pragma clang diagnostic ignored "-Wunused-variable" - -#include - -#pragma clang diagnostic pop - -class DMABufferFactory : public Pylon::IBufferFactory { - public: - struct BufferInfo { - gbm_bo* bo; - void* mappedAddress; - intptr_t context; - }; - - DMABufferFactory(); - - ~DMABufferFactory() override; - - void AllocateBuffer(size_t bufferSize, void** pCreatedBuffer, - intptr_t& bufferContext) override; - - void FreeBuffer(void* pCreatedBuffer, intptr_t bufferContext) override; - - void DestroyBufferFactory() override; - - gbm_bo* get_bo(void* pCreatedBuffer); - gbm_device* get_gbm_device() { return m_gbmDevice; } - - private: - gbm_device* m_gbmDevice = nullptr; - std::map buffers = {}; -}; - -#endif // DMA_BUFFER_FACTORY_H_ diff --git a/packages/camera/camera_linux/linux/fl_lightx_texture_gl.cpp b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.cpp new file mode 100644 index 000000000000..5340eb7c4cd4 --- /dev/null +++ b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.cpp @@ -0,0 +1,32 @@ +#include "fl_lightx_texture_gl.h" + 
+G_DEFINE_TYPE(FlLightxTextureGL, fl_lightx_texture_gl, fl_texture_gl_get_type()) + +static gboolean fl_lightx_texture_gl_populate(FlTextureGL* texture, + uint32_t* target, uint32_t* name, + uint32_t* width, uint32_t* height, + GError** error) { + FlLightxTextureGL* f = (FlLightxTextureGL*)texture; + *target = f->target; + *name = f->name; + *width = f->width; + *height = f->height; + return true; +} + +FlLightxTextureGL* fl_lightx_texture_gl_new(uint32_t target, uint32_t name, + uint32_t width, uint32_t height) { + auto r = FL_LIGHTX_TEXTURE_GL( + g_object_new(fl_lightx_texture_gl_get_type(), nullptr)); + r->target = target; + r->name = name; + r->width = width; + r->height = height; + return r; +} + +static void fl_lightx_texture_gl_class_init(FlLightxTextureGLClass* klass) { + FL_TEXTURE_GL_CLASS(klass)->populate = fl_lightx_texture_gl_populate; +} + +static void fl_lightx_texture_gl_init(FlLightxTextureGL* self) {} \ No newline at end of file diff --git a/packages/camera/camera_linux/linux/fl_lightx_texture_gl.h b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.h new file mode 100644 index 000000000000..7585a2e7fcef --- /dev/null +++ b/packages/camera/camera_linux/linux/fl_lightx_texture_gl.h @@ -0,0 +1,22 @@ + +#ifndef FL_LIGHTX_TEXTURE_GL_H_ +#define FL_LIGHTX_TEXTURE_GL_H_ + +#include "flutter_linux/flutter_linux.h" +#include "messages.g.h" + +G_DECLARE_FINAL_TYPE(FlLightxTextureGL, fl_lightx_texture_gl, FL, + LIGHTX_TEXTURE_GL, FlTextureGL) + +struct _FlLightxTextureGL { + FlTextureGL parent_instance; + uint32_t target; + uint32_t name; + uint32_t width; + uint32_t height; +}; + +FlLightxTextureGL* fl_lightx_texture_gl_new(uint32_t target, uint32_t name, + uint32_t width, uint32_t height); + +#endif // FL_LIGHTX_TEXTURE_GL_H_ \ No newline at end of file From e3e4fb82816b109b1cbc316d6ecc7b7d0caff01f Mon Sep 17 00:00:00 2001 From: Nicolas Depelteau <71102008+Depdx@users.noreply.github.com> Date: Wed, 11 Mar 2026 15:45:45 -0400 Subject: [PATCH 18/21] 
Update iOS deployment target and enhance camera functionality (#1) --- .../ios/Flutter/AppFrameworkInfo.plist | 2 +- packages/camera/camera/example/ios/Podfile | 2 +- .../ios/Runner.xcodeproj/project.pbxproj | 24 +- packages/camera/camera/example/lib/main.dart | 48 +- packages/camera/camera/example/pubspec.yaml | 2 + .../camera/camera_avfoundation/CHANGELOG.md | 4 + .../camera_avfoundation/example/lib/main.dart | 4 +- .../Sources/camera_avfoundation/Camera.swift | 3 + .../camera_avfoundation/CameraPlugin.swift | 6 + .../camera_avfoundation/CaptureDevice.swift | 2 + .../camera_avfoundation/DefaultCamera.swift | 19 + .../camera_avfoundation/Messages.swift | 295 +++---- .../lib/src/avfoundation_camera.dart | 18 +- .../lib/src/messages.g.dart | 717 +++++++----------- .../camera_avfoundation/pigeons/messages.dart | 17 +- .../camera/camera_avfoundation/pubspec.yaml | 2 +- .../test/avfoundation_camera_test.dart | 8 + .../test/avfoundation_camera_test.mocks.dart | 10 + 18 files changed, 513 insertions(+), 670 deletions(-) diff --git a/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist index 7c5696400627..1dc6cf7652ba 100644 --- a/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist +++ b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 12.0 + 13.0 diff --git a/packages/camera/camera/example/ios/Podfile b/packages/camera/camera/example/ios/Podfile index 01d4aa611bb9..17adeb14132e 100644 --- a/packages/camera/camera/example/ios/Podfile +++ b/packages/camera/camera/example/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '12.0' +# platform :ios, '13.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. 
ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj index 92de69f31b7a..e9a714f09ec6 100644 --- a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj @@ -141,6 +141,7 @@ 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 786AFE0EBD00D4651BA88F35 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -222,6 +223,23 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; + 786AFE0EBD00D4651BA88F35 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; @@ -336,7 +354,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -415,7 +433,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + 
IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -466,7 +484,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; diff --git a/packages/camera/camera/example/lib/main.dart b/packages/camera/camera/example/lib/main.dart index 4094a551ab52..fc1e9e73ec14 100644 --- a/packages/camera/camera/example/lib/main.dart +++ b/packages/camera/camera/example/lib/main.dart @@ -6,6 +6,7 @@ import 'dart:async'; import 'dart:io'; import 'package:camera/camera.dart'; +import 'package:camera_avfoundation/camera_avfoundation.dart'; import 'package:camera_linux/camera_linux.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/foundation.dart'; @@ -66,6 +67,7 @@ class _CameraExampleHomeState extends State double _maxAvailableZoom = 1.0; double _currentScale = 1.0; double _baseScale = 1.0; + double _currentLensPosition = 0.0; // Counting pointers (number of user fingers on screen) int _pointers = 0; @@ -157,6 +159,7 @@ class _CameraExampleHomeState extends State ), _captureControlRowWidget(), _modeControlRowWidget(), + if (!kIsWeb && Platform.isIOS) _lensPositionWidget(), Row( children: [ ElevatedButton( @@ -168,7 +171,9 @@ class _CameraExampleHomeState extends State final CameraLinux nativeCamera = CameraPlatform.instance as CameraLinux; nativeCamera.setImageFormatGroup( - controller!.cameraId, PlatformImageFormatGroup.mono8); + controller!.cameraId, + PlatformImageFormatGroup.mono8, + ); }); }, child: Text('mono8'), @@ -182,7 +187,9 @@ class _CameraExampleHomeState extends State final CameraLinux nativeCamera = CameraPlatform.instance as CameraLinux; nativeCamera.setImageFormatGroup( - controller!.cameraId, PlatformImageFormatGroup.rgb8); + 
controller!.cameraId, + PlatformImageFormatGroup.rgb8, + ); }); }, child: Text('rgb8'), @@ -523,6 +530,43 @@ class _CameraExampleHomeState extends State ); } + /// Lens position slider, shown on iOS only. + Widget _lensPositionWidget() { + return ColoredBox( + color: Colors.grey.shade50, + child: Column( + children: [ + const Center(child: Text('Lens Position (lock focus first)')), + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + const Text('0.0'), + Expanded( + child: Slider( + value: _currentLensPosition, + min: 0.0, + max: 1.0, + divisions: 100, + label: _currentLensPosition.toStringAsFixed(2), + onChanged: + controller != null && + controller!.value.focusMode == FocusMode.locked + ? (double value) { + setState(() => _currentLensPosition = value); + (CameraPlatform.instance as AVFoundationCamera) + .setLensPosition(value); + } + : null, + ), + ), + const Text('1.0'), + ], + ), + ], + ), + ); + } + /// Display the control bar with buttons to take pictures and record videos. Widget _captureControlRowWidget() { final CameraController? cameraController = controller; diff --git a/packages/camera/camera/example/pubspec.yaml b/packages/camera/camera/example/pubspec.yaml index 62fde6862a14..af6c4b52fa02 100644 --- a/packages/camera/camera/example/pubspec.yaml +++ b/packages/camera/camera/example/pubspec.yaml @@ -30,6 +30,8 @@ dev_dependencies: leak_tracker_flutter_testing: any dependency_overrides: + camera_avfoundation: + path: ../../camera_avfoundation camera_web: path: ../../camera_web camera_linux: diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index ca97b101df8b..9a532157ea2e 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.10.2 + +* Adds setLensPosition support for manually controlling lens focus position on iOS. 
+ ## 0.10.1 * Fixes fatal crash on iPhone 17 when using `ResolutionPreset.max`. diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart index 2f9bc238bd4c..cda6e362fc30 100644 --- a/packages/camera/camera_avfoundation/example/lib/main.dart +++ b/packages/camera/camera_avfoundation/example/lib/main.dart @@ -173,7 +173,7 @@ class _CameraExampleHomeState extends State with WidgetsBindi children: [ ElevatedButton( onPressed: () { - final AVFoundationCamera camera = CameraPlatform.instance as AVFoundationCamera; + final camera = CameraPlatform.instance as AVFoundationCamera; setState(() { _lenPosition = (_lenPosition + 0.1).clamp(0.0, 1.0); }); @@ -184,7 +184,7 @@ class _CameraExampleHomeState extends State with WidgetsBindi ), ElevatedButton( onPressed: () { - final AVFoundationCamera camera = CameraPlatform.instance as AVFoundationCamera; + final camera = CameraPlatform.instance as AVFoundationCamera; setState(() { _lenPosition = (_lenPosition - 0.1).clamp(0.0, 1.0); }); diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index 117fd909d32e..db3af3a0a8fe 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -95,6 +95,9 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, completion: @escaping (Result) -> Void ) + /// Sets the lens position to the given value in the (0,1) range and locks focus. 
+ func setLensPosition(_ position: Float, completion: @escaping (Result) -> Void) + func setZoomLevel(_ zoom: CGFloat, withCompletion: @escaping (Result) -> Void) func setVideoStabilizationMode( diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index 43ef5f48916c..ef39b93f9a2d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -477,6 +477,12 @@ extension CameraPlugin: CameraApi { } } + func setLensPosition(position: Double, completion: @escaping (Result) -> Void) { + captureSessionQueue.async { [weak self] in + self?.camera?.setLensPosition(Float(position), completion: completion) + } + } + func getMinZoomLevel(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in if let minZoom = self?.camera?.minimumAvailableZoomFactor { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift index b007cb39b4d1..a87c5b1a7ba8 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift @@ -37,6 +37,8 @@ protocol CaptureDevice: NSObjectProtocol { func isFocusModeSupported(_ mode: AVCaptureDevice.FocusMode) -> Bool var focusMode: AVCaptureDevice.FocusMode { get set } var focusPointOfInterest: CGPoint { get set } + var lensPosition: Float { get } + func setFocusModeLocked(lensPosition: Float, completionHandler handler: ((CMTime) -> Void)?) 
// Exposure var isExposurePointOfInterestSupported: Bool { get } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index 16d1637d23fa..d64a0a251660 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -928,6 +928,25 @@ final class DefaultCamera: NSObject, Camera { completion(.success(())) } + func setLensPosition( + _ position: Float, completion: @escaping (Result) -> Void + ) { + guard position >= 0, position <= 1 else { + completion( + .failure( + PigeonError( + code: "LENS_POSITION_ERROR", + message: + "Lens position out of bounds (should be between 0.0 and 1.0).", + details: nil))) + return + } + try? captureDevice.lockForConfiguration() + captureDevice.setFocusModeLocked(lensPosition: position, completionHandler: nil) + captureDevice.unlockForConfiguration() + completion(.success(())) + } + private func applyFocusMode() { applyFocusMode(focusMode, onDevice: captureDevice) } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift index 47e6abbbe750..75b65d60234c 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift @@ -1,7 +1,7 @@ // Copyright 2013 The Flutter Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// Autogenerated from Pigeon (v26.1.5), do not edit directly. 
+// Autogenerated from Pigeon (v26.2.3), do not edit directly. // See also: https://pub.dev/packages/pigeon import Foundation @@ -59,9 +59,7 @@ private func wrapError(_ error: Any) -> [Any?] { } private func createConnectionError(withChannelName channelName: String) -> PigeonError { - return PigeonError( - code: "channel-error", message: "Unable to establish connection on channel: '\(channelName)'.", - details: "") + return PigeonError(code: "channel-error", message: "Unable to establish connection on channel: '\(channelName)'.", details: "") } private func isNullish(_ value: Any?) -> Bool { @@ -116,12 +114,12 @@ func deepEqualsMessages(_ lhs: Any?, _ rhs: Any?) -> Bool { func deepHashMessages(value: Any?, hasher: inout Hasher) { if let valueList = value as? [AnyHashable] { - for item in valueList { deepHashMessages(value: item, hasher: &hasher) } - return + for item in valueList { deepHashMessages(value: item, hasher: &hasher) } + return } if let valueDict = value as? [AnyHashable: AnyHashable] { - for key in valueDict.keys { + for key in valueDict.keys { hasher.combine(key) deepHashMessages(value: valueDict[key]!, hasher: &hasher) } @@ -135,6 +133,8 @@ func deepHashMessages(value: Any?, hasher: inout Hasher) { return hasher.combine(String(describing: value)) } + + enum PlatformCameraLensDirection: Int { /// Front facing camera (a user looking at the screen is seen by the camera). case front = 0 @@ -215,6 +215,7 @@ struct PlatformCameraDescription: Hashable { /// The type of the camera lens. var lensType: PlatformCameraLensType + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraDescription? { let name = pigeonVar_list[0] as! 
String @@ -235,8 +236,7 @@ struct PlatformCameraDescription: Hashable { ] } static func == (lhs: PlatformCameraDescription, rhs: PlatformCameraDescription) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -255,6 +255,7 @@ struct PlatformCameraState: Hashable { /// Whether setting focus points is supported. var focusPointSupported: Bool + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraState? { let previewSize = pigeonVar_list[0] as! PlatformSize @@ -281,8 +282,7 @@ struct PlatformCameraState: Hashable { ] } static func == (lhs: PlatformCameraState, rhs: PlatformCameraState) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -299,6 +299,7 @@ struct PlatformCameraImageData: Hashable { var sensorExposureTimeNanoseconds: Int64 var sensorSensitivity: Double + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraImageData? { let formatCode = pigeonVar_list[0] as! Int64 @@ -331,8 +332,7 @@ struct PlatformCameraImageData: Hashable { ] } static func == (lhs: PlatformCameraImageData, rhs: PlatformCameraImageData) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -345,6 +345,7 @@ struct PlatformCameraImagePlane: Hashable { var width: Int64 var height: Int64 + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraImagePlane? { let bytes = pigeonVar_list[0] as! 
FlutterStandardTypedData @@ -368,8 +369,7 @@ struct PlatformCameraImagePlane: Hashable { ] } static func == (lhs: PlatformCameraImagePlane, rhs: PlatformCameraImagePlane) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -383,6 +383,7 @@ struct PlatformMediaSettings: Hashable { var audioBitrate: Int64? = nil var enableAudio: Bool + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformMediaSettings? { let resolutionPreset = pigeonVar_list[0] as! PlatformResolutionPreset @@ -409,8 +410,7 @@ struct PlatformMediaSettings: Hashable { ] } static func == (lhs: PlatformMediaSettings, rhs: PlatformMediaSettings) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -421,6 +421,7 @@ struct PlatformPoint: Hashable { var x: Double var y: Double + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformPoint? { let x = pigeonVar_list[0] as! Double @@ -438,8 +439,7 @@ struct PlatformPoint: Hashable { ] } static func == (lhs: PlatformPoint, rhs: PlatformPoint) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -450,6 +450,7 @@ struct PlatformSize: Hashable { var width: Double var height: Double + // swift-format-ignore: AlwaysUseLowerCamelCase static func fromList(_ pigeonVar_list: [Any?]) -> PlatformSize? { let width = pigeonVar_list[0] as! 
Double @@ -467,8 +468,7 @@ struct PlatformSize: Hashable { ] } static func == (lhs: PlatformSize, rhs: PlatformSize) -> Bool { - return deepEqualsMessages(lhs.toList(), rhs.toList()) - } + return deepEqualsMessages(lhs.toList(), rhs.toList()) } func hash(into hasher: inout Hasher) { deepHashMessages(value: toList(), hasher: &hasher) } @@ -630,22 +630,17 @@ class MessagesPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable { static let shared = MessagesPigeonCodec(readerWriter: MessagesPigeonCodecReaderWriter()) } -var messagesPigeonMethodCodec = FlutterStandardMethodCodec( - readerWriter: MessagesPigeonCodecReaderWriter()) +var messagesPigeonMethodCodec = FlutterStandardMethodCodec(readerWriter: MessagesPigeonCodecReaderWriter()); + /// Generated protocol from Pigeon that represents a handler of messages from Flutter. protocol CameraApi { /// Returns the list of available cameras. - func getAvailableCameras( - completion: @escaping (Result<[PlatformCameraDescription], Error>) -> Void) + func getAvailableCameras(completion: @escaping (Result<[PlatformCameraDescription], Error>) -> Void) /// Create a new camera with the given settings, and returns its ID. - func create( - cameraName: String, settings: PlatformMediaSettings, - completion: @escaping (Result) -> Void) + func create(cameraName: String, settings: PlatformMediaSettings, completion: @escaping (Result) -> Void) /// Initializes the camera with the given ID. - func initialize( - cameraId: Int64, imageFormat: PlatformImageFormatGroup, - completion: @escaping (Result) -> Void) + func initialize(cameraId: Int64, imageFormat: PlatformImageFormatGroup, completion: @escaping (Result) -> Void) /// Begins streaming frames from the camera. func startImageStream(completion: @escaping (Result) -> Void) /// Stops streaming frames from the camera. @@ -659,8 +654,7 @@ protocol CameraApi { /// and any associated resources can be cleaned up. 
func dispose(cameraId: Int64, completion: @escaping (Result) -> Void) /// Locks the camera capture to the current device orientation. - func lockCaptureOrientation( - orientation: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) + func lockCaptureOrientation(orientation: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) /// Unlocks camera capture orientation, allowing it to automatically adapt to /// device orientation. func unlockCaptureOrientation(completion: @escaping (Result) -> Void) @@ -681,12 +675,16 @@ protocol CameraApi { /// Switches the camera to the given flash mode. func setFlashMode(mode: PlatformFlashMode, completion: @escaping (Result) -> Void) /// Switches the camera to the given exposure mode. - func setExposureMode( - mode: PlatformExposureMode, completion: @escaping (Result) -> Void) + func setExposureMode(mode: PlatformExposureMode, completion: @escaping (Result) -> Void) /// Anchors auto-exposure to the given point in (0,1) coordinate space. /// /// A null value resets to the default exposure point. func setExposurePoint(point: PlatformPoint?, completion: @escaping (Result) -> Void) + /// Sets the lens position manually to the given value. + /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + func setLensPosition(position: Double, completion: @escaping (Result) -> Void) /// Returns the minimum exposure offset supported by the camera. func getMinExposureOffset(completion: @escaping (Result) -> Void) /// Returns the maximum exposure offset supported by the camera. @@ -706,11 +704,9 @@ protocol CameraApi { /// Sets the zoom factor. func setZoomLevel(zoom: Double, completion: @escaping (Result) -> Void) /// Sets the video stabilization mode. 
- func setVideoStabilizationMode( - mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) + func setVideoStabilizationMode(mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) /// Gets if the given video stabilization mode is supported. - func isVideoStabilizationModeSupported( - mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) + func isVideoStabilizationModeSupported(mode: PlatformVideoStabilizationMode, completion: @escaping (Result) -> Void) /// Pauses streaming of preview frames. func pausePreview(completion: @escaping (Result) -> Void) /// Resumes a previously paused preview stream. @@ -718,25 +714,19 @@ protocol CameraApi { /// Changes the camera used while recording video. /// /// This should only be called while video recording is active. - func updateDescriptionWhileRecording( - cameraName: String, completion: @escaping (Result) -> Void) + func updateDescriptionWhileRecording(cameraName: String, completion: @escaping (Result) -> Void) /// Sets the file format used for taking pictures. - func setImageFileFormat( - format: PlatformImageFileFormat, completion: @escaping (Result) -> Void) + func setImageFileFormat(format: PlatformImageFileFormat, completion: @escaping (Result) -> Void) } /// Generated setup class from Pigeon to handle messages through the `binaryMessenger`. class CameraApiSetup { static var codec: FlutterStandardMessageCodec { MessagesPigeonCodec.shared } /// Sets up an instance of `CameraApi` to handle messages through the `binaryMessenger`. - static func setUp( - binaryMessenger: FlutterBinaryMessenger, api: CameraApi?, messageChannelSuffix: String = "" - ) { + static func setUp(binaryMessenger: FlutterBinaryMessenger, api: CameraApi?, messageChannelSuffix: String = "") { let channelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : "" /// Returns the list of available cameras. 
- let getAvailableCamerasChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getAvailableCameras\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getAvailableCamerasChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getAvailableCameras\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getAvailableCamerasChannel.setMessageHandler { _, reply in api.getAvailableCameras { result in @@ -752,9 +742,7 @@ class CameraApiSetup { getAvailableCamerasChannel.setMessageHandler(nil) } /// Create a new camera with the given settings, and returns its ID. - let createChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.create\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let createChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.create\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { createChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -773,9 +761,7 @@ class CameraApiSetup { createChannel.setMessageHandler(nil) } /// Initializes the camera with the given ID. - let initializeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let initializeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { initializeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -794,9 +780,7 @@ class CameraApiSetup { initializeChannel.setMessageHandler(nil) } /// Begins streaming frames from the camera. 
- let startImageStreamChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let startImageStreamChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { startImageStreamChannel.setMessageHandler { _, reply in api.startImageStream { result in @@ -812,9 +796,7 @@ class CameraApiSetup { startImageStreamChannel.setMessageHandler(nil) } /// Stops streaming frames from the camera. - let stopImageStreamChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let stopImageStreamChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { stopImageStreamChannel.setMessageHandler { _, reply in api.stopImageStream { result in @@ -833,10 +815,7 @@ class CameraApiSetup { /// frame sent. /// /// This is used to throttle sending frames across the channel. 
- let receivedImageStreamDataChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let receivedImageStreamDataChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { receivedImageStreamDataChannel.setMessageHandler { _, reply in api.receivedImageStreamData { result in @@ -853,9 +832,7 @@ class CameraApiSetup { } /// Indicates that the given camera is no longer being used on the Dart side, /// and any associated resources can be cleaned up. - let disposeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let disposeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { disposeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -873,10 +850,7 @@ class CameraApiSetup { disposeChannel.setMessageHandler(nil) } /// Locks the camera capture to the current device orientation. - let lockCaptureOrientationChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let lockCaptureOrientationChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { lockCaptureOrientationChannel.setMessageHandler { message, reply in let args = message as! [Any?] 
@@ -895,10 +869,7 @@ class CameraApiSetup { } /// Unlocks camera capture orientation, allowing it to automatically adapt to /// device orientation. - let unlockCaptureOrientationChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let unlockCaptureOrientationChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { unlockCaptureOrientationChannel.setMessageHandler { _, reply in api.unlockCaptureOrientation { result in @@ -915,9 +886,7 @@ class CameraApiSetup { } /// Takes a picture with the current settings, and returns the path to the /// resulting file. - let takePictureChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let takePictureChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { takePictureChannel.setMessageHandler { _, reply in api.takePicture { result in @@ -933,10 +902,7 @@ class CameraApiSetup { takePictureChannel.setMessageHandler(nil) } /// Does any preprocessing necessary before beginning to record video. 
- let prepareForVideoRecordingChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let prepareForVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { prepareForVideoRecordingChannel.setMessageHandler { _, reply in api.prepareForVideoRecording { result in @@ -953,9 +919,7 @@ class CameraApiSetup { } /// Begins recording video, optionally enabling streaming to Dart at the same /// time. - let startVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let startVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { startVideoRecordingChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -973,9 +937,7 @@ class CameraApiSetup { startVideoRecordingChannel.setMessageHandler(nil) } /// Stops recording video, and results the path to the resulting file. 
- let stopVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let stopVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { stopVideoRecordingChannel.setMessageHandler { _, reply in api.stopVideoRecording { result in @@ -991,9 +953,7 @@ class CameraApiSetup { stopVideoRecordingChannel.setMessageHandler(nil) } /// Pauses video recording. - let pauseVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let pauseVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { pauseVideoRecordingChannel.setMessageHandler { _, reply in api.pauseVideoRecording { result in @@ -1009,9 +969,7 @@ class CameraApiSetup { pauseVideoRecordingChannel.setMessageHandler(nil) } /// Resumes a previously paused video recording. - let resumeVideoRecordingChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let resumeVideoRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { resumeVideoRecordingChannel.setMessageHandler { _, reply in api.resumeVideoRecording { result in @@ -1027,9 +985,7 @@ class CameraApiSetup { resumeVideoRecordingChannel.setMessageHandler(nil) } /// Switches the camera to the given flash mode. 
- let setFlashModeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setFlashModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setFlashModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1047,9 +1003,7 @@ class CameraApiSetup { setFlashModeChannel.setMessageHandler(nil) } /// Switches the camera to the given exposure mode. - let setExposureModeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setExposureModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setExposureModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1069,9 +1023,7 @@ class CameraApiSetup { /// Anchors auto-exposure to the given point in (0,1) coordinate space. /// /// A null value resets to the default exposure point. - let setExposurePointChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setExposurePointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setExposurePointChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1088,10 +1040,29 @@ class CameraApiSetup { } else { setExposurePointChannel.setMessageHandler(nil) } + /// Sets the lens position manually to the given value. 
+ /// The value should be between 0 and 1. + /// 0 means the lens is at the minimum position. + /// 1 means the lens is at the maximum position. + let setLensPositionChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setLensPosition\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setLensPositionChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let positionArg = args[0] as! Double + api.setLensPosition(position: positionArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setLensPositionChannel.setMessageHandler(nil) + } /// Returns the minimum exposure offset supported by the camera. - let getMinExposureOffsetChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMinExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMinExposureOffsetChannel.setMessageHandler { _, reply in api.getMinExposureOffset { result in @@ -1107,9 +1078,7 @@ class CameraApiSetup { getMinExposureOffsetChannel.setMessageHandler(nil) } /// Returns the maximum exposure offset supported by the camera. 
- let getMaxExposureOffsetChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMaxExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMaxExposureOffsetChannel.setMessageHandler { _, reply in api.getMaxExposureOffset { result in @@ -1125,9 +1094,7 @@ class CameraApiSetup { getMaxExposureOffsetChannel.setMessageHandler(nil) } /// Sets the exposure offset manually to the given value. - let setExposureOffsetChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setExposureOffsetChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setExposureOffsetChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1145,9 +1112,7 @@ class CameraApiSetup { setExposureOffsetChannel.setMessageHandler(nil) } /// Switches the camera to the given focus mode. - let setFocusModeChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setFocusModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setFocusModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1167,9 +1132,7 @@ class CameraApiSetup { /// Anchors auto-focus to the given point in (0,1) coordinate space. 
/// /// A null value resets to the default focus point. - let setFocusPointChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setFocusPointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setFocusPointChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1187,9 +1150,7 @@ class CameraApiSetup { setFocusPointChannel.setMessageHandler(nil) } /// Returns the minimum zoom level supported by the camera. - let getMinZoomLevelChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMinZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMinZoomLevelChannel.setMessageHandler { _, reply in api.getMinZoomLevel { result in @@ -1205,9 +1166,7 @@ class CameraApiSetup { getMinZoomLevelChannel.setMessageHandler(nil) } /// Returns the maximum zoom level supported by the camera. - let getMaxZoomLevelChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let getMaxZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { getMaxZoomLevelChannel.setMessageHandler { _, reply in api.getMaxZoomLevel { result in @@ -1223,9 +1182,7 @@ class CameraApiSetup { getMaxZoomLevelChannel.setMessageHandler(nil) } /// Sets the zoom factor. 
- let setZoomLevelChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setZoomLevelChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setZoomLevelChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1243,10 +1200,7 @@ class CameraApiSetup { setZoomLevelChannel.setMessageHandler(nil) } /// Sets the video stabilization mode. - let setVideoStabilizationModeChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.setVideoStabilizationMode\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setVideoStabilizationModeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setVideoStabilizationMode\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setVideoStabilizationModeChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1264,10 +1218,7 @@ class CameraApiSetup { setVideoStabilizationModeChannel.setMessageHandler(nil) } /// Gets if the given video stabilization mode is supported. - let isVideoStabilizationModeSupportedChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.isVideoStabilizationModeSupported\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let isVideoStabilizationModeSupportedChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.isVideoStabilizationModeSupported\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { isVideoStabilizationModeSupportedChannel.setMessageHandler { message, reply in let args = message as! [Any?] 
@@ -1285,9 +1236,7 @@ class CameraApiSetup { isVideoStabilizationModeSupportedChannel.setMessageHandler(nil) } /// Pauses streaming of preview frames. - let pausePreviewChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let pausePreviewChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { pausePreviewChannel.setMessageHandler { _, reply in api.pausePreview { result in @@ -1303,9 +1252,7 @@ class CameraApiSetup { pausePreviewChannel.setMessageHandler(nil) } /// Resumes a previously paused preview stream. - let resumePreviewChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let resumePreviewChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { resumePreviewChannel.setMessageHandler { _, reply in api.resumePreview { result in @@ -1323,10 +1270,7 @@ class CameraApiSetup { /// Changes the camera used while recording video. /// /// This should only be called while video recording is active. 
- let updateDescriptionWhileRecordingChannel = FlutterBasicMessageChannel( - name: - "dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let updateDescriptionWhileRecordingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { updateDescriptionWhileRecordingChannel.setMessageHandler { message, reply in let args = message as! [Any?] @@ -1344,9 +1288,7 @@ class CameraApiSetup { updateDescriptionWhileRecordingChannel.setMessageHandler(nil) } /// Sets the file format used for taking pictures. - let setImageFileFormatChannel = FlutterBasicMessageChannel( - name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat\(channelSuffix)", - binaryMessenger: binaryMessenger, codec: codec) + let setImageFileFormatChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) if let api = api { setImageFileFormatChannel.setMessageHandler { message, reply in let args = message as! [Any?] 
@@ -1416,31 +1358,25 @@ class PigeonEventSink { } class ImageDataStreamStreamHandler: PigeonEventChannelWrapper { - static func register( - with messenger: FlutterBinaryMessenger, - instanceName: String = "", - streamHandler: ImageDataStreamStreamHandler - ) { - var channelName = - "dev.flutter.pigeon.camera_avfoundation.CameraImageStreamEventApi.imageDataStream" + static func register(with messenger: FlutterBinaryMessenger, + instanceName: String = "", + streamHandler: ImageDataStreamStreamHandler) { + var channelName = "dev.flutter.pigeon.camera_avfoundation.CameraImageStreamEventApi.imageDataStream" if !instanceName.isEmpty { channelName += ".\(instanceName)" } let internalStreamHandler = PigeonStreamHandler(wrapper: streamHandler) - let channel = FlutterEventChannel( - name: channelName, binaryMessenger: messenger, codec: messagesPigeonMethodCodec) + let channel = FlutterEventChannel(name: channelName, binaryMessenger: messenger, codec: messagesPigeonMethodCodec) channel.setStreamHandler(internalStreamHandler) } } - + /// Handler for native callbacks that are not tied to a specific camera ID. /// /// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift. protocol CameraGlobalEventApiProtocol { /// Called when the device's physical orientation changes. - func deviceOrientationChanged( - orientation orientationArg: PlatformDeviceOrientation, - completion: @escaping (Result) -> Void) + func deviceOrientationChanged(orientation orientationArg: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) } class CameraGlobalEventApi: CameraGlobalEventApiProtocol { private let binaryMessenger: FlutterBinaryMessenger @@ -1453,14 +1389,9 @@ class CameraGlobalEventApi: CameraGlobalEventApiProtocol { return MessagesPigeonCodec.shared } /// Called when the device's physical orientation changes. 
- func deviceOrientationChanged( - orientation orientationArg: PlatformDeviceOrientation, - completion: @escaping (Result) -> Void - ) { - let channelName: String = - "dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged\(messageChannelSuffix)" - let channel = FlutterBasicMessageChannel( - name: channelName, binaryMessenger: binaryMessenger, codec: codec) + func deviceOrientationChanged(orientation orientationArg: PlatformDeviceOrientation, completion: @escaping (Result) -> Void) { + let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec) channel.sendMessage([orientationArg] as [Any?]) { response in guard let listResponse = response as? [Any?] else { completion(.failure(createConnectionError(withChannelName: channelName))) @@ -1484,9 +1415,7 @@ class CameraGlobalEventApi: CameraGlobalEventApiProtocol { /// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift. protocol CameraEventApiProtocol { /// Called when the camera is inialitized for use. - func initialized( - initialState initialStateArg: PlatformCameraState, - completion: @escaping (Result) -> Void) + func initialized(initialState initialStateArg: PlatformCameraState, completion: @escaping (Result) -> Void) /// Called when an error occurs in the camera. /// /// This should be used for errors that occur outside of the context of @@ -1504,14 +1433,9 @@ class CameraEventApi: CameraEventApiProtocol { return MessagesPigeonCodec.shared } /// Called when the camera is inialitized for use. 
- func initialized( - initialState initialStateArg: PlatformCameraState, - completion: @escaping (Result) -> Void - ) { - let channelName: String = - "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized\(messageChannelSuffix)" - let channel = FlutterBasicMessageChannel( - name: channelName, binaryMessenger: binaryMessenger, codec: codec) + func initialized(initialState initialStateArg: PlatformCameraState, completion: @escaping (Result) -> Void) { + let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec) channel.sendMessage([initialStateArg] as [Any?]) { response in guard let listResponse = response as? [Any?] else { completion(.failure(createConnectionError(withChannelName: channelName))) @@ -1531,12 +1455,9 @@ class CameraEventApi: CameraEventApiProtocol { /// /// This should be used for errors that occur outside of the context of /// handling a specific HostApi call, such as during streaming. - func error(message messageArg: String, completion: @escaping (Result) -> Void) - { - let channelName: String = - "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error\(messageChannelSuffix)" - let channel = FlutterBasicMessageChannel( - name: channelName, binaryMessenger: binaryMessenger, codec: codec) + func error(message messageArg: String, completion: @escaping (Result) -> Void) { + let channelName: String = "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec) channel.sendMessage([messageArg] as [Any?]) { response in guard let listResponse = response as? [Any?] 
else { completion(.failure(createConnectionError(withChannelName: channelName))) diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart index 97649e00517b..a8710502195c 100644 --- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart +++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart @@ -1,4 +1,4 @@ - // Copyright 2013 The Flutter Authors +// Copyright 2013 The Flutter Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -37,7 +37,8 @@ class AVFoundationCamera extends CameraPlatform { /// This is only exposed for test purposes. It shouldn't be used by clients of /// the plugin as it may break or change at any time. @visibleForTesting - final StreamController cameraEventStreamController = StreamController.broadcast(); + final StreamController cameraEventStreamController = + StreamController.broadcast(); /// The handler for device-level messages that should be rebroadcast to /// clients as [DeviceEvent]s. @@ -50,7 +51,8 @@ class AVFoundationCamera extends CameraPlatform { /// The per-camera handlers for messages that should be rebroadcast to /// clients as [CameraEvent]s. @visibleForTesting - final Map hostCameraHandlers = {}; + final Map hostCameraHandlers = + {}; // The stream to receive frames from the native code. StreamSubscription? 
_platformImageStreamSubscription; @@ -65,7 +67,9 @@ class AVFoundationCamera extends CameraPlatform { @override Future> availableCameras() async { try { - return (await _hostApi.getAvailableCameras()).map(cameraDescriptionFromPlatform).toList(); + return (await _hostApi.getAvailableCameras()) + .map(cameraDescriptionFromPlatform) + .toList(); } on PlatformException catch (e) { throw CameraException(e.code, e.message); } @@ -168,7 +172,8 @@ class AVFoundationCamera extends CameraPlatform { @override Stream onDeviceOrientationChanged() { - return hostHandler.deviceEventStreamController.stream.whereType(); + return hostHandler.deviceEventStreamController.stream + .whereType(); } @override @@ -626,7 +631,8 @@ class HostDeviceMessageHandler implements CameraGlobalEventApi { /// /// It is a `broadcast` because multiple controllers will connect to /// different stream views of this Controller. - final StreamController deviceEventStreamController = StreamController.broadcast(); + final StreamController deviceEventStreamController = + StreamController.broadcast(); @override void deviceOrientationChanged(PlatformDeviceOrientation orientation) { diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart index 4254c2f79273..b9c0c072f1d5 100644 --- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart +++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart @@ -1,21 +1,40 @@ // Copyright 2013 The Flutter Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// Autogenerated from Pigeon (v26.1.5), do not edit directly. +// Autogenerated from Pigeon (v26.2.3), do not edit directly. 
// See also: https://pub.dev/packages/pigeon -// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, omit_obvious_local_variable_types, unused_shown_name, unnecessary_import, no_leading_underscores_for_local_identifiers +// ignore_for_file: unused_import, unused_shown_name +// ignore_for_file: type=lint import 'dart:async'; -import 'dart:typed_data' show Float64List, Int32List, Int64List, Uint8List; +import 'dart:typed_data' show Float64List, Int32List, Int64List; -import 'package:flutter/foundation.dart' show ReadBuffer, WriteBuffer; import 'package:flutter/services.dart'; +import 'package:meta/meta.dart' show immutable, protected, visibleForTesting; -PlatformException _createConnectionError(String channelName) { - return PlatformException( - code: 'channel-error', - message: 'Unable to establish connection on channel: "$channelName".', - ); +Object? _extractReplyValueOrThrow( + List? replyList, + String channelName, { + required bool isNullValid, +}) { + if (replyList == null) { + throw PlatformException( + code: 'channel-error', + message: 'Unable to establish connection on channel: "$channelName".', + ); + } else if (replyList.length > 1) { + throw PlatformException( + code: replyList[0]! as String, + message: replyList[1] as String?, + details: replyList[2], + ); + } else if (!isNullValid && (replyList.isNotEmpty && replyList[0] == null)) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } + return replyList.firstOrNull; } List wrapResponse({ @@ -53,8 +72,10 @@ bool _deepEquals(Object? a, Object? b) { enum PlatformCameraLensDirection { /// Front facing camera (a user looking at the screen is seen by the camera). front, + /// Back facing camera (a user looking at the screen is not seen by the camera). 
back, + /// External camera which may not be mounted to the device. external, } @@ -265,7 +286,7 @@ class PlatformCameraImageData { formatCode: result[0]! as int, width: result[1]! as int, height: result[2]! as int, - planes: (result[3] as List?)!.cast(), + planes: (result[3]! as List).cast(), lensAperture: result[4]! as double, sensorExposureTimeNanoseconds: result[5]! as int, sensorSensitivity: result[6]! as double, @@ -479,7 +500,6 @@ class PlatformSize { int get hashCode => Object.hashAll(_toList()); } - class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -487,7 +507,7 @@ class _PigeonCodec extends StandardMessageCodec { if (value is int) { buffer.putUint8(4); buffer.putInt64(value); - } else if (value is PlatformCameraLensDirection) { + } else if (value is PlatformCameraLensDirection) { buffer.putUint8(129); writeValue(buffer, value.index); } else if (value is PlatformCameraLensType) { @@ -630,22 +650,14 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as List?)!.cast(); - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return (pigeonVar_replyValue! as List) + .cast(); } /// Create a new camera with the given settings, and returns its ID. 
@@ -661,22 +673,13 @@ class CameraApi { [cameraName, settings], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as int?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as int; } /// Initializes the camera with the given ID. @@ -695,17 +698,12 @@ class CameraApi { [cameraId, imageFormat], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Begins streaming frames from the camera. @@ -719,17 +717,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Stops streaming frames from the camera. @@ -743,17 +736,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Called by the Dart side of the plugin when it has received the last image @@ -770,17 +758,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Indicates that the given camera is no longer being used on the Dart side, @@ -797,17 +780,12 @@ class CameraApi { [cameraId], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Locks the camera capture to the current device orientation. @@ -825,17 +803,12 @@ class CameraApi { [orientation], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Unlocks camera capture orientation, allowing it to automatically adapt to @@ -850,17 +823,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Takes a picture with the current settings, and returns the path to the @@ -875,22 +843,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as String?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as String; } /// Does any preprocessing necessary before beginning to record video. @@ -904,17 +863,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Begins recording video, optionally enabling streaming to Dart at the same @@ -931,17 +885,12 @@ class CameraApi { [enableStream], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Stops recording video, and results the path to the resulting file. 
@@ -955,22 +904,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as String?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as String; } /// Pauses video recording. @@ -984,17 +924,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Resumes a previously paused video recording. @@ -1008,17 +943,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Switches the camera to the given flash mode. @@ -1034,17 +964,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Switches the camera to the given exposure mode. @@ -1060,17 +985,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Anchors auto-exposure to the given point in (0,1) coordinate space. @@ -1088,17 +1008,12 @@ class CameraApi { [point], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Sets the lens position manually to the given value. @@ -1106,25 +1021,23 @@ class CameraApi { /// 0 means the lens is at the minimum position. /// 1 means the lens is at the maximum position. Future setLensPosition(double position) async { - final String pigeonVar_channelName = 'dev.flutter.pigeon.camera_avfoundation.CameraApi.setLensPosition$pigeonVar_messageChannelSuffix'; - final BasicMessageChannel pigeonVar_channel = BasicMessageChannel( + final pigeonVar_channelName = + 'dev.flutter.pigeon.camera_avfoundation.CameraApi.setLensPosition$pigeonVar_messageChannelSuffix'; + final pigeonVar_channel = BasicMessageChannel( pigeonVar_channelName, pigeonChannelCodec, binaryMessenger: pigeonVar_binaryMessenger, ); - final List? pigeonVar_replyList = - await pigeonVar_channel.send([position]) as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [position], + ); + final pigeonVar_replyList = await pigeonVar_sendFuture as List?; + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Returns the minimum exposure offset supported by the camera. 
@@ -1138,22 +1051,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Returns the maximum exposure offset supported by the camera. @@ -1167,22 +1071,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Sets the exposure offset manually to the given value. 
@@ -1198,17 +1093,12 @@ class CameraApi { [offset], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Switches the camera to the given focus mode. @@ -1224,17 +1114,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Anchors auto-focus to the given point in (0,1) coordinate space. @@ -1252,17 +1137,12 @@ class CameraApi { [point], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Returns the minimum zoom level supported by the camera. 
@@ -1276,22 +1156,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Returns the maximum zoom level supported by the camera. @@ -1305,22 +1176,13 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as double?)!; - } + + final Object? pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as double; } /// Sets the zoom factor. 
@@ -1336,17 +1198,12 @@ class CameraApi { [zoom], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Sets the video stabilization mode. @@ -1364,17 +1221,12 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Gets if the given video stabilization mode is supported. @@ -1392,22 +1244,13 @@ class CameraApi { [mode], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else if (pigeonVar_replyList[0] == null) { - throw PlatformException( - code: 'null-error', - message: 'Host platform returned null value for non-null return value.', - ); - } else { - return (pigeonVar_replyList[0] as bool?)!; - } + + final Object? 
pigeonVar_replyValue = _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: false, + ); + return pigeonVar_replyValue! as bool; } /// Pauses streaming of preview frames. @@ -1421,17 +1264,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Resumes a previously paused preview stream. @@ -1445,17 +1283,12 @@ class CameraApi { ); final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Changes the camera used while recording video. @@ -1473,17 +1306,12 @@ class CameraApi { [cameraName], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! 
as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } /// Sets the file format used for taking pictures. @@ -1499,17 +1327,12 @@ class CameraApi { [format], ); final pigeonVar_replyList = await pigeonVar_sendFuture as List?; - if (pigeonVar_replyList == null) { - throw _createConnectionError(pigeonVar_channelName); - } else if (pigeonVar_replyList.length > 1) { - throw PlatformException( - code: pigeonVar_replyList[0]! as String, - message: pigeonVar_replyList[1] as String?, - details: pigeonVar_replyList[2], - ); - } else { - return; - } + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); } } @@ -1551,19 +1374,11 @@ abstract class CameraGlobalEventApi { pigeonVar_channel.setMessageHandler(null); } else { pigeonVar_channel.setMessageHandler((Object? message) async { - assert( - message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged was null.', - ); - final List args = (message as List?)!; - final PlatformDeviceOrientation? arg_orientation = - (args[0] as PlatformDeviceOrientation?); - assert( - arg_orientation != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged was null, expected non-null PlatformDeviceOrientation.', - ); + final List args = message! as List; + final PlatformDeviceOrientation arg_orientation = + args[0]! as PlatformDeviceOrientation; try { - api.deviceOrientationChanged(arg_orientation!); + api.deviceOrientationChanged(arg_orientation); return wrapResponse(empty: true); } on PlatformException catch (e) { return wrapResponse(error: e); @@ -1611,19 +1426,11 @@ abstract class CameraEventApi { pigeonVar_channel.setMessageHandler(null); } else { pigeonVar_channel.setMessageHandler((Object? 
message) async { - assert( - message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized was null.', - ); - final List args = (message as List?)!; - final PlatformCameraState? arg_initialState = - (args[0] as PlatformCameraState?); - assert( - arg_initialState != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized was null, expected non-null PlatformCameraState.', - ); + final List args = message! as List; + final PlatformCameraState arg_initialState = + args[0]! as PlatformCameraState; try { - api.initialized(arg_initialState!); + api.initialized(arg_initialState); return wrapResponse(empty: true); } on PlatformException catch (e) { return wrapResponse(error: e); @@ -1645,18 +1452,10 @@ abstract class CameraEventApi { pigeonVar_channel.setMessageHandler(null); } else { pigeonVar_channel.setMessageHandler((Object? message) async { - assert( - message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error was null.', - ); - final List args = (message as List?)!; - final String? arg_message = (args[0] as String?); - assert( - arg_message != null, - 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error was null, expected non-null String.', - ); + final List args = message! as List; + final String arg_message = args[0]! 
as String; try { - api.error(arg_message!); + api.error(arg_message); return wrapResponse(empty: true); } on PlatformException catch (e) { return wrapResponse(error: e); diff --git a/packages/camera/camera_avfoundation/pigeons/messages.dart b/packages/camera/camera_avfoundation/pigeons/messages.dart index bd0a81f9a812..fe659f4ef827 100644 --- a/packages/camera/camera_avfoundation/pigeons/messages.dart +++ b/packages/camera/camera_avfoundation/pigeons/messages.dart @@ -4,17 +4,18 @@ import 'package:pigeon/pigeon.dart'; -@ConfigurePigeon(PigeonOptions( - dartOut: 'lib/src/messages.g.dart', - swiftOut: +@ConfigurePigeon( + PigeonOptions( + dartOut: 'lib/src/messages.g.dart', + swiftOut: 'ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift', - objcOptions: ObjcOptions( - prefix: 'FCP', - headerIncludePath: './include/camera_avfoundation/messages.g.h', + objcOptions: ObjcOptions( + prefix: 'FCP', + headerIncludePath: './include/camera_avfoundation/messages.g.h', + ), + copyrightHeader: 'pigeons/copyright.txt', ), - copyrightHeader: 'pigeons/copyright.txt', ) - // Pigeon version of CameraLensDirection. enum PlatformCameraLensDirection { /// Front facing camera (a user looking at the screen is seen by the camera). diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index 8a10d0d21f72..ed5f3064bbe2 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. 
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.10.1 +version: 0.10.2 environment: sdk: ^3.9.0 diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart index 53d7965c6bac..6412830d0ca1 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart @@ -678,6 +678,14 @@ void main() { expect(minZoomLevel, stubZoomLevel); }); + test('Should set the lens position', () async { + const position = 0.5; + + await camera.setLensPosition(position); + + verify(mockApi.setLensPosition(position)); + }); + test('Should set the zoom level', () async { const zoom = 2.0; diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart index 225eac9931c6..2bb30ac726c6 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart @@ -22,6 +22,7 @@ import 'package:mockito/src/dummies.dart' as _i3; // ignore_for_file: unnecessary_parenthesis // ignore_for_file: camel_case_types // ignore_for_file: subtype_of_sealed_class +// ignore_for_file: invalid_use_of_internal_member /// A class which mocks [CameraApi]. /// @@ -231,6 +232,15 @@ class MockCameraApi extends _i1.Mock implements _i2.CameraApi { ) as _i4.Future); + @override + _i4.Future setLensPosition(double? 
position) => + (super.noSuchMethod( + Invocation.method(#setLensPosition, [position]), + returnValue: _i4.Future.value(), + returnValueForMissingStub: _i4.Future.value(), + ) + as _i4.Future); + @override _i4.Future getMinExposureOffset() => (super.noSuchMethod( From bee51769ade991183e11b1b9fb5dce9005bf9a4a Mon Sep 17 00:00:00 2001 From: nicolas depelteau Date: Wed, 11 Mar 2026 16:50:38 -0400 Subject: [PATCH 19/21] Add camera transformation support: implement geometric transforms for outputs --- .../camera_avfoundation/AssetWriter.swift | 3 + .../Sources/camera_avfoundation/Camera.swift | 3 + .../camera_avfoundation/CameraPlugin.swift | 9 + .../CaptureConnection.swift | 8 + .../camera_avfoundation/DefaultCamera.swift | 109 +++++++++++- .../camera_avfoundation/Messages.swift | 134 +++++++++++++++ .../SavePhotoDelegate.swift | 53 +++++- .../lib/camera_avfoundation.dart | 1 + .../lib/src/avfoundation_camera.dart | 28 +++ .../lib/src/camera_transform.dart | 76 +++++++++ .../lib/src/messages.g.dart | 161 ++++++++++++++++++ .../camera_avfoundation/pigeons/messages.dart | 60 +++++++ .../test/avfoundation_camera_test.dart | 67 ++++++++ .../test/avfoundation_camera_test.mocks.dart | 9 + 14 files changed, 715 insertions(+), 6 deletions(-) create mode 100644 packages/camera/camera_avfoundation/lib/src/camera_transform.dart diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift index d86662a9b8b3..c57ee1dfa569 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/AssetWriter.swift @@ -27,6 +27,9 @@ protocol AssetWriterInput: NSObjectProtocol { var expectsMediaDataInRealTime: Bool { get set } var isReadyForMoreMediaData: Bool { get } + /// The transform 
to apply to the visual media data before writing it. + var transform: CGAffineTransform { get set } + func append(_ sampleBuffer: CMSampleBuffer) -> Bool } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index db3af3a0a8fe..0dbd949aa786 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -114,6 +114,9 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, func pausePreview() func resumePreview() + /// Applies a geometric transform (rotation, mirroring, optional crop) to all camera outputs. + func setTransform(_ transform: PlatformCameraTransform) + func setDescriptionWhileRecording( _ cameraName: String, withCompletion: @escaping (Result) -> Void diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index ef39b93f9a2d..ff8d63609ba9 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -561,4 +561,13 @@ extension CameraPlugin: CameraApi { completion(.success(())) } } + + func setTransform( + transform: PlatformCameraTransform, completion: @escaping (Result) -> Void + ) { + captureSessionQueue.async { [weak self] in + self?.camera?.setTransform(transform) + completion(.success(())) + } + } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift index d119afb9474a..74b297a75192 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureConnection.swift @@ -25,6 +25,14 @@ protocol CaptureConnection: NSObjectProtocol { /// Corresponds to the preferredVideoStabilizationMode property of `AVCaptureConnection` var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode { get set } + /// Corresponds to the `videoRotationAngle` property of `AVCaptureConnection` (iOS 17+). + @available(iOS 17.0, *) + var videoRotationAngle: CGFloat { get set } + + /// Corresponds to the `isVideoRotationAngleSupported(_:)` method of `AVCaptureConnection` (iOS 17+). + @available(iOS 17.0, *) + func isVideoRotationAngleSupported(_ angle: CGFloat) -> Bool + } extension AVCaptureConnection: CaptureConnection {} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index d64a0a251660..a4f8d3345068 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -126,6 +126,12 @@ final class DefaultCamera: NSObject, Camera { private var focusMode = PlatformFocusMode.auto private var flashMode: PlatformFlashMode + /// The current camera transform applied to all outputs. + private var cameraTransform: PlatformCameraTransform? + + /// Metal-backed Core Image context, lazily initialised. Used only when a crop is active. 
+ private lazy var ciContext = CIContext(options: [.useSoftwareRenderer: false]) + private static func pigeonErrorFromNSError(_ error: NSError) -> PigeonError { return PigeonError( code: "Error \(error.code)", @@ -752,7 +758,9 @@ final class DefaultCamera: NSObject, Camera { assert(path != nil, "Path must not be nil if no error.") completion(.success(path!)) } - } + }, + cropRect: cameraTransform?.cropRect, + ciContext: cameraTransform?.cropRect != nil ? ciContext : nil ) assert( @@ -796,6 +804,8 @@ final class DefaultCamera: NSObject, Camera { updateOrientation(orientation, forCaptureOutput: capturePhotoOutput) updateOrientation(orientation, forCaptureOutput: captureVideoOutput) + + applyConnectionTransform() } private func updateOrientation( @@ -808,6 +818,79 @@ final class DefaultCamera: NSObject, Camera { } } + // MARK: - Transform + + func setTransform(_ transform: PlatformCameraTransform) { + cameraTransform = transform + applyConnectionTransform() + } + + /// Applies the current rotation/mirror transform at the hardware connection level. + /// + /// `AVCaptureConnection.videoRotationAngle` (iOS 17+) instructs the camera ISP to rotate + /// the pixel data in hardware – zero CPU/GPU cost – and the effect propagates to the + /// preview texture, image stream, video recording, and photo capture simultaneously. + private func applyConnectionTransform() { + guard let transform = cameraTransform else { return } + + for output in [captureVideoOutput as CaptureOutput, capturePhotoOutput as CaptureOutput] { + guard let connection = output.connection(with: .video) else { continue } + + if #available(iOS 17.0, *) { + let angle = transform.rotationDegrees + if connection.isVideoRotationAngleSupported(angle) { + connection.videoRotationAngle = angle + } + } + + // Vertical flip is implemented as a composition: mirror horizontally + rotate 180°. 
+ let mirrorH = transform.flipHorizontally != transform.flipVertically + if connection.isVideoMirroringSupported { + connection.isVideoMirrored = mirrorH + } + } + } + + /// Crops `pixelBuffer` to the normalised rect from `transform.cropRect`. + /// + /// The crop is performed on the GPU via Metal-backed Core Image (`ciContext`). + /// Returns `nil` when allocation fails; callers should fall back to the original buffer. + private func applyCrop( + _ pixelBuffer: CVPixelBuffer, cropRect: PlatformRect + ) -> CVPixelBuffer? { + let fullWidth = CVPixelBufferGetWidth(pixelBuffer) + let fullHeight = CVPixelBufferGetHeight(pixelBuffer) + + let cropX = cropRect.x * Double(fullWidth) + let cropY = cropRect.y * Double(fullHeight) + let cropW = cropRect.width * Double(fullWidth) + let cropH = cropRect.height * Double(fullHeight) + + // Core Image origin is bottom-left; convert from top-left. + let ciCropRect = CGRect( + x: cropX, + y: Double(fullHeight) - cropY - cropH, + width: cropW, + height: cropH) + + let ciImage = CIImage(cvPixelBuffer: pixelBuffer).cropped(to: ciCropRect) + .transformed(by: CGAffineTransform(translationX: -ciCropRect.origin.x, y: -ciCropRect.origin.y)) + + var outBuffer: CVPixelBuffer? 
+ let attrs: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: videoFormat, + kCVPixelBufferWidthKey as String: Int(cropW), + kCVPixelBufferHeightKey as String: Int(cropH), + kCVPixelBufferIOSurfacePropertiesKey as String: [:], + ] + guard CVPixelBufferCreate(kCFAllocatorDefault, Int(cropW), Int(cropH), videoFormat, attrs as CFDictionary, &outBuffer) == kCVReturnSuccess, + let out = outBuffer + else { return nil } + + ciContext.render(ciImage, to: out) + return out + } + private func videoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation { @@ -1179,6 +1262,9 @@ final class DefaultCamera: NSObject, Camera { newConnection.videoOrientation = oldConnection.videoOrientation } + // Re-apply any camera transform that was set by the caller. + applyConnectionTransform() + // Add the new connections to the session. if !videoCaptureSession.canAddInput(captureVideoInput) { completion( @@ -1269,9 +1355,17 @@ final class DefaultCamera: NSObject, Camera { ) { if output == captureVideoOutput.avOutput { if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { + // Apply crop transform if one is active (GPU path via Core Image / Metal). + // When no crop is set this is a zero-overhead fast path. + let processedBuffer: CVPixelBuffer + if let cropRect = cameraTransform?.cropRect { + processedBuffer = applyCrop(newBuffer, cropRect: cropRect) ?? newBuffer + } else { + processedBuffer = newBuffer + } pixelBufferSynchronizationQueue.sync { - latestPixelBuffer = newBuffer + latestPixelBuffer = processedBuffer } onFrameAvailable?() @@ -1342,10 +1436,17 @@ final class DefaultCamera: NSObject, Camera { } if output == captureVideoOutput.avOutput { - let nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) + let rawBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)! 
let nextSampleTime = CMTimeSubtract(sampleTime, recordingTimeOffset) if nextSampleTime > lastAppendedVideoSampleTime { - let _ = videoAdaptor?.append(nextBuffer!, withPresentationTime: nextSampleTime) + // Apply crop transform to the recorded frame if needed. + let writeBuffer: CVPixelBuffer + if let cropRect = cameraTransform?.cropRect { + writeBuffer = applyCrop(rawBuffer, cropRect: cropRect) ?? rawBuffer + } else { + writeBuffer = rawBuffer + } + let _ = videoAdaptor?.append(writeBuffer, withPresentationTime: nextSampleTime) lastAppendedVideoSampleTime = nextSampleTime } } else { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift index 75b65d60234c..f2b909166992 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift @@ -474,6 +474,98 @@ struct PlatformSize: Hashable { } } +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformRect: Hashable { + var x: Double + var y: Double + var width: Double + var height: Double + + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformRect? { + let x = pigeonVar_list[0] as! Double + let y = pigeonVar_list[1] as! Double + let width = pigeonVar_list[2] as! Double + let height = pigeonVar_list[3] as! Double + + return PlatformRect( + x: x, + y: y, + width: width, + height: height + ) + } + func toList() -> [Any?] 
{ + return [ + x, + y, + width, + height, + ] + } + static func == (lhs: PlatformRect, rhs: PlatformRect) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +/// Pigeon version of a geometric camera transform. +/// +/// Rotation and mirroring are applied at the hardware connection level +/// (AVCaptureConnection.videoRotationAngle / isVideoMirrored), which means no +/// CPU/GPU cost and the effect is visible in the preview, image stream, photos, +/// and recorded video simultaneously. +/// +/// Crop is applied per-frame via Core Image on the GPU (Metal) and has a small +/// (~1–3 ms) cost per frame. +/// +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformCameraTransform: Hashable { + /// Clockwise rotation in degrees. Must be 0, 90, 180, or 270. + var rotationDegrees: Double + /// Whether to flip the image along the horizontal axis (left–right mirror). + var flipHorizontally: Bool + /// Whether to flip the image along the vertical axis (upside-down mirror). + /// + /// Implemented as a 180° rotation composed with a horizontal flip. + var flipVertically: Bool + /// Optional crop rectangle in normalized (0,1) coordinate space. + /// + /// Applied after rotation/mirroring. Null means no crop. + var cropRect: PlatformRect? = nil + + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraTransform? { + let rotationDegrees = pigeonVar_list[0] as! Double + let flipHorizontally = pigeonVar_list[1] as! Bool + let flipVertically = pigeonVar_list[2] as! Bool + let cropRect: PlatformRect? = nilOrValue(pigeonVar_list[3]) + + return PlatformCameraTransform( + rotationDegrees: rotationDegrees, + flipHorizontally: flipHorizontally, + flipVertically: flipVertically, + cropRect: cropRect + ) + } + func toList() -> [Any?] 
{ + return [ + rotationDegrees, + flipHorizontally, + flipVertically, + cropRect, + ] + } + static func == (lhs: PlatformCameraTransform, rhs: PlatformCameraTransform) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + private class MessagesPigeonCodecReader: FlutterStandardReader { override func readValue(ofType type: UInt8) -> Any? { switch type { @@ -551,6 +643,10 @@ private class MessagesPigeonCodecReader: FlutterStandardReader { return PlatformPoint.fromList(self.readValue() as! [Any?]) case 145: return PlatformSize.fromList(self.readValue() as! [Any?]) + case 146: + return PlatformRect.fromList(self.readValue() as! [Any?]) + case 147: + return PlatformCameraTransform.fromList(self.readValue() as! [Any?]) default: return super.readValue(ofType: type) } @@ -610,6 +706,12 @@ private class MessagesPigeonCodecWriter: FlutterStandardWriter { } else if let value = value as? PlatformSize { super.writeByte(145) super.writeValue(value.toList()) + } else if let value = value as? PlatformRect { + super.writeByte(146) + super.writeValue(value.toList()) + } else if let value = value as? PlatformCameraTransform { + super.writeByte(147) + super.writeValue(value.toList()) } else { super.writeValue(value) } @@ -717,6 +819,14 @@ protocol CameraApi { func updateDescriptionWhileRecording(cameraName: String, completion: @escaping (Result) -> Void) /// Sets the file format used for taking pictures. func setImageFileFormat(format: PlatformImageFileFormat, completion: @escaping (Result) -> Void) + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. 
On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. + func setTransform(transform: PlatformCameraTransform, completion: @escaping (Result) -> Void) } /// Generated setup class from Pigeon to handle messages through the `binaryMessenger`. @@ -1305,6 +1415,30 @@ class CameraApiSetup { } else { setImageFileFormatChannel.setMessageHandler(nil) } + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. + let setTransformChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setTransform\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setTransformChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let transformArg = args[0] as! 
PlatformCameraTransform + api.setTransform(transform: transformArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setTransformChannel.setMessageHandler(nil) + } } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift index 35050120e118..824cd4dd37f6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/SavePhotoDelegate.swift @@ -3,6 +3,7 @@ // found in the LICENSE file. import AVFoundation +import CoreImage import Flutter import Foundation @@ -25,6 +26,13 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate { /// The completion handler block for capture and save photo operations. let completionHandler: SavePhotoDelegateCompletionHandler + /// Optional crop rectangle in normalised (0,1) coordinate space. + /// When non-nil the photo is cropped (GPU path) before it is written to disk. + private let cropRect: PlatformRect? + + /// Core Image context shared with the camera (Metal-backed). Only used when `cropRect` is set. + private let ciContext: CIContext? + /// The path for captured photo file. /// Exposed for unit tests to verify the captured photo file path. var filePath: String { @@ -36,14 +44,20 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate { /// ioQueue - the queue on which captured photos are written to disk. /// completionHandler - The completion handler block for save photo operations. Can /// be called from either main queue or IO queue. + /// cropRect - optional crop in normalised (0,1) coordinates; applied before writing. 
+ /// ciContext - Core Image context to use for crop rendering; must be non-nil when cropRect is set. init( path: String, ioQueue: DispatchQueue, - completionHandler: @escaping SavePhotoDelegateCompletionHandler + completionHandler: @escaping SavePhotoDelegateCompletionHandler, + cropRect: PlatformRect? = nil, + ciContext: CIContext? = nil ) { self.path = path self.ioQueue = ioQueue self.completionHandler = completionHandler + self.cropRect = cropRect + self.ciContext = ciContext super.init() } @@ -65,7 +79,42 @@ class SavePhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate { do { let data = photoDataProvider() - try data?.writeToPath(strongSelf.path, options: .atomic) + let finalData: WritableData? + + // If a crop is requested, apply it in Core Image before writing. + if let crop = strongSelf.cropRect, + let ctx = strongSelf.ciContext, + let rawData = data as? Data + { + let ci = CIImage(data: rawData) + let fullW = ci.map { Double($0.extent.width) } ?? 0 + let fullH = ci.map { Double($0.extent.height) } ?? 0 + if let ci = ci, fullW > 0, fullH > 0 { + // Core Image origin is bottom-left; convert from top-left. 
+ let ciCrop = CGRect( + x: crop.x * fullW, + y: (1.0 - crop.y - crop.height) * fullH, + width: crop.width * fullW, + height: crop.height * fullH) + let cropped = ci.cropped(to: ciCrop) + .transformed( + by: CGAffineTransform(translationX: -ciCrop.origin.x, y: -ciCrop.origin.y)) + if let encoded = ctx.jpegRepresentation( + of: cropped, + colorSpace: CGColorSpaceCreateDeviceRGB()) + { + finalData = encoded + } else { + finalData = data + } + } else { + finalData = data + } + } else { + finalData = data + } + + try finalData?.writeToPath(strongSelf.path, options: .atomic) strongSelf.completionHandler(strongSelf.path, nil) } catch { strongSelf.completionHandler(nil, error) diff --git a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart index e6f7340ed7a4..da12c0d6ec34 100644 --- a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart +++ b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart @@ -3,3 +3,4 @@ // found in the LICENSE file. export 'src/avfoundation_camera.dart'; +export 'src/camera_transform.dart'; diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart index a8710502195c..8dd7019596b3 100644 --- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart +++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart @@ -11,6 +11,7 @@ import 'package:flutter/services.dart'; import 'package:flutter/widgets.dart'; import 'package:stream_transform/stream_transform.dart'; +import 'camera_transform.dart'; import 'messages.g.dart'; import 'type_conversion.dart'; import 'utils.dart'; @@ -450,6 +451,33 @@ class AVFoundationCamera extends CameraPlatform { await _hostApi.setImageFileFormat(_pigeonImageFileFormat(format)); } + /// Applies a geometric [transform] to all camera outputs on iOS. 
+ /// + /// The [cameraId] parameter is currently unused on iOS (there is only ever + /// one active camera) but is included for API consistency. + /// + /// - Rotation and mirroring are applied at the hardware AVCaptureConnection + /// level (requires iOS 17+) and cost nothing in CPU / GPU. + /// - Crop is applied per-frame by Core Image on the GPU and costs ~1–3 ms + /// per frame. Pass `null` (or omit `cropRect`) to disable it. + Future setTransform(int cameraId, CameraTransform transform) async { + await _hostApi.setTransform( + PlatformCameraTransform( + rotationDegrees: transform.rotationDegrees, + flipHorizontally: transform.flipHorizontally, + flipVertically: transform.flipVertically, + cropRect: transform.cropRect == null + ? null + : PlatformRect( + x: transform.cropRect!.x, + y: transform.cropRect!.y, + width: transform.cropRect!.width, + height: transform.cropRect!.height, + ), + ), + ); + } + @override Widget buildPreview(int cameraId) { return Texture(textureId: cameraId); diff --git a/packages/camera/camera_avfoundation/lib/src/camera_transform.dart b/packages/camera/camera_avfoundation/lib/src/camera_transform.dart new file mode 100644 index 000000000000..6c3e26cc3c0b --- /dev/null +++ b/packages/camera/camera_avfoundation/lib/src/camera_transform.dart @@ -0,0 +1,76 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +/// A normalized rectangle within the (0,1) coordinate space used to describe +/// a crop region for [CameraTransform]. +/// +/// The origin (0,0) is the top-left corner of the image. +class CameraTransformRect { + /// Creates a normalized crop rectangle. 
+ const CameraTransformRect({ + required this.x, + required this.y, + required this.width, + required this.height, + }) : assert(x >= 0 && x <= 1, 'x must be in [0, 1]'), + assert(y >= 0 && y <= 1, 'y must be in [0, 1]'), + assert(width > 0 && width <= 1, 'width must be in (0, 1]'), + assert(height > 0 && height <= 1, 'height must be in (0, 1]'), + assert(x + width <= 1, 'x + width must be <= 1'), + assert(y + height <= 1, 'y + height must be <= 1'); + + /// Left edge in normalized [0,1] coordinates. + final double x; + + /// Top edge in normalized [0,1] coordinates. + final double y; + + /// Width in normalized [0,1] coordinates. + final double width; + + /// Height in normalized [0,1] coordinates. + final double height; +} + +/// A geometric transform to apply to all camera outputs simultaneously: +/// the preview texture, the image stream, captured photos, and recorded video. +/// +/// On iOS 17+ rotation and mirroring are applied at the hardware +/// `AVCaptureConnection` level (zero CPU / GPU cost). Crop uses Core Image on +/// the GPU (~1–3 ms per frame). +class CameraTransform { + /// Creates a camera transform. + /// + /// Defaults to identity (no rotation, no flip, no crop). + const CameraTransform({ + this.rotationDegrees = 0, + this.flipHorizontally = false, + this.flipVertically = false, + this.cropRect, + }) : assert( + rotationDegrees == 0 || + rotationDegrees == 90 || + rotationDegrees == 180 || + rotationDegrees == 270, + 'rotationDegrees must be 0, 90, 180, or 270', + ); + + /// Clockwise rotation in degrees. + /// + /// Must be one of: `0`, `90`, `180`, `270`. + final double rotationDegrees; + + /// Flip the image left–right (horizontal mirror). + final bool flipHorizontally; + + /// Flip the image upside-down (vertical mirror). + /// + /// Implemented as a horizontal flip composed with a 180° rotation. + final bool flipVertically; + + /// Optional crop region in normalized (0,1) coordinate space. + /// + /// Applied after rotation and mirroring. 
`null` means no crop. + final CameraTransformRect? cropRect; +} diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart index b9c0c072f1d5..4df80c329803 100644 --- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart +++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart @@ -500,6 +500,130 @@ class PlatformSize { int get hashCode => Object.hashAll(_toList()); } +class PlatformRect { + PlatformRect({ + required this.x, + required this.y, + required this.width, + required this.height, + }); + + double x; + + double y; + + double width; + + double height; + + List _toList() { + return [x, y, width, height]; + } + + Object encode() { + return _toList(); + } + + static PlatformRect decode(Object result) { + result as List; + return PlatformRect( + x: result[0]! as double, + y: result[1]! as double, + width: result[2]! as double, + height: result[3]! as double, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! PlatformRect || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Pigeon version of a geometric camera transform. +/// +/// Rotation and mirroring are applied at the hardware connection level +/// (AVCaptureConnection.videoRotationAngle / isVideoMirrored), which means no +/// CPU/GPU cost and the effect is visible in the preview, image stream, photos, +/// and recorded video simultaneously. +/// +/// Crop is applied per-frame via Core Image on the GPU (Metal) and has a small +/// (~1–3 ms) cost per frame. 
+class PlatformCameraTransform { + PlatformCameraTransform({ + required this.rotationDegrees, + required this.flipHorizontally, + required this.flipVertically, + this.cropRect, + }); + + /// Clockwise rotation in degrees. Must be 0, 90, 180, or 270. + double rotationDegrees; + + /// Whether to flip the image along the horizontal axis (left–right mirror). + bool flipHorizontally; + + /// Whether to flip the image along the vertical axis (upside-down mirror). + /// + /// Implemented as a 180° rotation composed with a horizontal flip. + bool flipVertically; + + /// Optional crop rectangle in normalized (0,1) coordinate space. + /// + /// Applied after rotation/mirroring. Null means no crop. + PlatformRect? cropRect; + + List _toList() { + return [ + rotationDegrees, + flipHorizontally, + flipVertically, + cropRect, + ]; + } + + Object encode() { + return _toList(); + } + + static PlatformCameraTransform decode(Object result) { + result as List; + return PlatformCameraTransform( + rotationDegrees: result[0]! as double, + flipHorizontally: result[1]! as bool, + flipVertically: result[2]! as bool, + cropRect: result[3] as PlatformRect?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! 
PlatformCameraTransform || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -558,6 +682,12 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is PlatformSize) { buffer.putUint8(145); writeValue(buffer, value.encode()); + } else if (value is PlatformRect) { + buffer.putUint8(146); + writeValue(buffer, value.encode()); + } else if (value is PlatformCameraTransform) { + buffer.putUint8(147); + writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); } @@ -612,6 +742,10 @@ class _PigeonCodec extends StandardMessageCodec { return PlatformPoint.decode(readValue(buffer)!); case 145: return PlatformSize.decode(readValue(buffer)!); + case 146: + return PlatformRect.decode(readValue(buffer)!); + case 147: + return PlatformCameraTransform.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); } @@ -1334,6 +1468,33 @@ class CameraApi { isNullValid: true, ); } + + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. 
+ Future setTransform(PlatformCameraTransform transform) async { + final pigeonVar_channelName = + 'dev.flutter.pigeon.camera_avfoundation.CameraApi.setTransform$pigeonVar_messageChannelSuffix'; + final pigeonVar_channel = BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [transform], + ); + final pigeonVar_replyList = await pigeonVar_sendFuture as List?; + + _extractReplyValueOrThrow( + pigeonVar_replyList, + pigeonVar_channelName, + isNullValid: true, + ); + } } Stream imageDataStream({String instanceName = ''}) { diff --git a/packages/camera/camera_avfoundation/pigeons/messages.dart b/packages/camera/camera_avfoundation/pigeons/messages.dart index fe659f4ef827..a8cb3940d5e0 100644 --- a/packages/camera/camera_avfoundation/pigeons/messages.dart +++ b/packages/camera/camera_avfoundation/pigeons/messages.dart @@ -191,6 +191,55 @@ class PlatformSize { final double height; } +// Pigeon equivalent of CGRect, with values in the (0,1) normalized coordinate space. +class PlatformRect { + PlatformRect({ + required this.x, + required this.y, + required this.width, + required this.height, + }); + + final double x; + final double y; + final double width; + final double height; +} + +/// Pigeon version of a geometric camera transform. +/// +/// Rotation and mirroring are applied at the hardware connection level +/// (AVCaptureConnection.videoRotationAngle / isVideoMirrored), which means no +/// CPU/GPU cost and the effect is visible in the preview, image stream, photos, +/// and recorded video simultaneously. +/// +/// Crop is applied per-frame via Core Image on the GPU (Metal) and has a small +/// (~1–3 ms) cost per frame. +class PlatformCameraTransform { + PlatformCameraTransform({ + required this.rotationDegrees, + required this.flipHorizontally, + required this.flipVertically, + this.cropRect, + }); + + /// Clockwise rotation in degrees. 
Must be 0, 90, 180, or 270. + final double rotationDegrees; + + /// Whether to flip the image along the horizontal axis (left–right mirror). + final bool flipHorizontally; + + /// Whether to flip the image along the vertical axis (upside-down mirror). + /// + /// Implemented as a 180° rotation composed with a horizontal flip. + final bool flipVertically; + + /// Optional crop rectangle in normalized (0,1) coordinate space. + /// + /// Applied after rotation/mirroring. Null means no crop. + final PlatformRect? cropRect; +} + @HostApi() abstract class CameraApi { /// Returns the list of available cameras. @@ -364,6 +413,17 @@ abstract class CameraApi { @async @ObjCSelector('setImageFileFormat:') void setImageFileFormat(PlatformImageFileFormat format); + + /// Applies a geometric transform (rotation, mirroring, crop) to the camera + /// output. The transform is applied to the preview, image stream, captured + /// photos, and recorded video simultaneously. + /// + /// Requires iOS 17+ for hardware-accelerated rotation. On earlier iOS + /// versions the rotation part of the transform is silently ignored and only + /// the crop (if any) is applied in software. 
+ @async + @ObjCSelector('setCameraTransform:') + void setTransform(PlatformCameraTransform transform); } @EventChannelApi() diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart index 6412830d0ca1..bbe36d08e8db 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart @@ -7,6 +7,7 @@ import 'dart:math'; import 'package:async/async.dart'; import 'package:camera_avfoundation/src/avfoundation_camera.dart'; +import 'package:camera_avfoundation/src/camera_transform.dart'; import 'package:camera_avfoundation/src/messages.g.dart'; import 'package:camera_avfoundation/src/utils.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; @@ -1001,4 +1002,70 @@ void main() { verify(mockApi.setImageFileFormat(PlatformImageFileFormat.jpeg)); }); }); + + group('setTransform', () { + late AVFoundationCamera camera; + late MockCameraApi mockApi; + const cameraId = 1; + + setUp(() { + mockApi = MockCameraApi(); + camera = AVFoundationCamera(api: mockApi); + }); + + test('forwards rotation-only transform to host API', () async { + await camera.setTransform( + cameraId, + const CameraTransform(rotationDegrees: 90), + ); + + final captured = + verify(mockApi.setTransform(captureAny)).captured.single + as PlatformCameraTransform; + expect(captured.rotationDegrees, 90); + expect(captured.flipHorizontally, false); + expect(captured.flipVertically, false); + expect(captured.cropRect, isNull); + }); + + test('forwards full transform with crop to host API', () async { + await camera.setTransform( + cameraId, + const CameraTransform( + rotationDegrees: 180, + flipHorizontally: true, + cropRect: CameraTransformRect( + x: 0.1, + y: 0.1, + width: 0.8, + height: 0.8, + ), + ), + ); + + final captured = + verify(mockApi.setTransform(captureAny)).captured.single + as 
PlatformCameraTransform; + expect(captured.rotationDegrees, 180); + expect(captured.flipHorizontally, true); + expect(captured.flipVertically, false); + expect(captured.cropRect, isNotNull); + expect(captured.cropRect!.x, 0.1); + expect(captured.cropRect!.y, 0.1); + expect(captured.cropRect!.width, 0.8); + expect(captured.cropRect!.height, 0.8); + }); + + test('forwards identity transform (zeros) to host API', () async { + await camera.setTransform(cameraId, const CameraTransform()); + + final captured = + verify(mockApi.setTransform(captureAny)).captured.single + as PlatformCameraTransform; + expect(captured.rotationDegrees, 0); + expect(captured.flipHorizontally, false); + expect(captured.flipVertically, false); + expect(captured.cropRect, isNull); + }); + }); } diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart index 2bb30ac726c6..79c2820e212e 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.mocks.dart @@ -370,4 +370,13 @@ class MockCameraApi extends _i1.Mock implements _i2.CameraApi { returnValueForMissingStub: _i4.Future.value(), ) as _i4.Future); + + @override + _i4.Future setTransform(_i2.PlatformCameraTransform? 
transform) => + (super.noSuchMethod( + Invocation.method(#setTransform, [transform]), + returnValue: _i4.Future.value(), + returnValueForMissingStub: _i4.Future.value(), + ) + as _i4.Future); } From a3850a1e39060dabab37ccd8c4d04c9e6fca231e Mon Sep 17 00:00:00 2001 From: nicolas depelteau Date: Wed, 11 Mar 2026 17:05:47 -0400 Subject: [PATCH 20/21] Add iOS 17+ camera transformation controls: implement rotation, flip, and crop features --- packages/camera/camera/example/lib/main.dart | 137 +++++++++++++++++++ 1 file changed, 137 insertions(+) diff --git a/packages/camera/camera/example/lib/main.dart b/packages/camera/camera/example/lib/main.dart index fc1e9e73ec14..c6d947919e3b 100644 --- a/packages/camera/camera/example/lib/main.dart +++ b/packages/camera/camera/example/lib/main.dart @@ -69,6 +69,12 @@ class _CameraExampleHomeState extends State double _baseScale = 1.0; double _currentLensPosition = 0.0; + // Transform state (iOS 17+) + double _transformRotation = 0; + bool _transformFlipH = false; + bool _transformFlipV = false; + bool _transformCropEnabled = false; + // Counting pointers (number of user fingers on screen) int _pointers = 0; @@ -160,6 +166,7 @@ class _CameraExampleHomeState extends State _captureControlRowWidget(), _modeControlRowWidget(), if (!kIsWeb && Platform.isIOS) _lensPositionWidget(), + if (!kIsWeb && Platform.isIOS) _transformControlRowWidget(), Row( children: [ ElevatedButton( @@ -567,6 +574,136 @@ class _CameraExampleHomeState extends State ); } + Future _applyTransform() async { + if (controller == null || !controller!.value.isInitialized) { + return; + } + // iOS camera sensor native orientation is landscape (0°). The system + // normally compensates with 90° to produce an upright portrait image. + // We offset the user-facing rotation (relative to portrait) by 90° so + // that "0°" in the UI means "upright" and "90°" means "rotated right". 
+ final double hardwareAngle = (_transformRotation + 90) % 360; + await (CameraPlatform.instance as AVFoundationCamera).setTransform( + controller!.cameraId, + CameraTransform( + rotationDegrees: hardwareAngle, + flipHorizontally: _transformFlipH, + flipVertically: _transformFlipV, + cropRect: _transformCropEnabled + ? const CameraTransformRect( + x: 0.1, + y: 0.1, + width: 0.8, + height: 0.8, + ) + : null, + ), + ); + } + + /// Transform controls panel (iOS 17+ only). + Widget _transformControlRowWidget() { + final bool enabled = + controller != null && controller!.value.isInitialized; + return ColoredBox( + color: Colors.grey.shade50, + child: Column( + children: [ + const Center( + child: Text( + 'Camera Transform (iOS 17+)', + style: TextStyle(fontWeight: FontWeight.bold), + ), + ), + // Rotation + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + const Text('Rotation:'), + for (final double deg in [0, 90, 180, 270]) + TextButton( + style: TextButton.styleFrom( + foregroundColor: _transformRotation == deg + ? Colors.orange + : Colors.blue, + ), + onPressed: enabled + ? () { + setState(() => _transformRotation = deg); + _applyTransform(); + } + : null, + child: Text('${deg.toInt()}\u00b0'), + ), + ], + ), + // Flip, crop, reset + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + TextButton( + style: TextButton.styleFrom( + foregroundColor: + _transformFlipH ? Colors.orange : Colors.blue, + ), + onPressed: enabled + ? () { + setState(() => _transformFlipH = !_transformFlipH); + _applyTransform(); + } + : null, + child: const Text('Flip H'), + ), + TextButton( + style: TextButton.styleFrom( + foregroundColor: + _transformFlipV ? Colors.orange : Colors.blue, + ), + onPressed: enabled + ? () { + setState(() => _transformFlipV = !_transformFlipV); + _applyTransform(); + } + : null, + child: const Text('Flip V'), + ), + TextButton( + style: TextButton.styleFrom( + foregroundColor: + _transformCropEnabled ? 
Colors.orange : Colors.blue, + ), + onPressed: enabled + ? () { + setState( + () => + _transformCropEnabled = !_transformCropEnabled, + ); + _applyTransform(); + } + : null, + child: const Text('Crop 80%'), + ), + TextButton( + onPressed: enabled + ? () { + setState(() { + _transformRotation = 0; + _transformFlipH = false; + _transformFlipV = false; + _transformCropEnabled = false; + }); + _applyTransform(); + } + : null, + child: const Text('Reset'), + ), + ], + ), + ], + ), + ); + } + /// Display the control bar with buttons to take pictures and record videos. Widget _captureControlRowWidget() { final CameraController? cameraController = controller; From 110a9210175d3255ce71d5cf695de77334afdaa0 Mon Sep 17 00:00:00 2001 From: nicolas depelteau Date: Wed, 11 Mar 2026 17:07:25 -0400 Subject: [PATCH 21/21] Refactor camera rotation handling: adjust rotation degrees for iOS camera sensor orientation --- packages/camera/camera/example/lib/main.dart | 7 +------ .../Sources/camera_avfoundation/DefaultCamera.swift | 7 ++++++- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/camera/camera/example/lib/main.dart b/packages/camera/camera/example/lib/main.dart index c6d947919e3b..75cc60897b8f 100644 --- a/packages/camera/camera/example/lib/main.dart +++ b/packages/camera/camera/example/lib/main.dart @@ -578,15 +578,10 @@ class _CameraExampleHomeState extends State if (controller == null || !controller!.value.isInitialized) { return; } - // iOS camera sensor native orientation is landscape (0°). The system - // normally compensates with 90° to produce an upright portrait image. - // We offset the user-facing rotation (relative to portrait) by 90° so - // that "0°" in the UI means "upright" and "90°" means "rotated right". 
- final double hardwareAngle = (_transformRotation + 90) % 360; await (CameraPlatform.instance as AVFoundationCamera).setTransform( controller!.cameraId, CameraTransform( - rotationDegrees: hardwareAngle, + rotationDegrees: _transformRotation, flipHorizontally: _transformFlipH, flipVertically: _transformFlipV, cropRect: _transformCropEnabled diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index a4f8d3345068..3fa1af49d782 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -837,7 +837,12 @@ final class DefaultCamera: NSObject, Camera { guard let connection = output.connection(with: .video) else { continue } if #available(iOS 17.0, *) { - let angle = transform.rotationDegrees + // The iOS camera sensor's native orientation is landscape (0°). The + // system normally compensates with 90° to produce an upright portrait + // image. We offset the caller's angle by 90° so that + // rotationDegrees = 0 means "upright / no rotation" from the API + // user's point of view. + let angle = (transform.rotationDegrees + 90).truncatingRemainder(dividingBy: 360) if connection.isVideoRotationAngleSupported(angle) { connection.videoRotationAngle = angle }