diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index 53a6c4ca1ad..d018f494199 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.11.0+2
+
+* Updates minimum supported SDK version to Flutter 3.19/Dart 3.3.
+
 ## 0.11.0+1
 
 * Updates minimum supported SDK version to Flutter 3.16/Dart 3.2.
diff --git a/packages/camera/camera/example/pubspec.yaml b/packages/camera/camera/example/pubspec.yaml
index f3b240b56c2..54f2cebd4fe 100644
--- a/packages/camera/camera/example/pubspec.yaml
+++ b/packages/camera/camera/example/pubspec.yaml
@@ -28,5 +28,9 @@ dev_dependencies:
   integration_test:
     sdk: flutter
 
+dependency_overrides:
+  camera_web:
+    path: ../../camera_web
+
 flutter:
   uses-material-design: true
diff --git a/packages/camera/camera/example/web/index.html b/packages/camera/camera/example/web/index.html
index 91502587eda..0dd4a04db41 100644
--- a/packages/camera/camera/example/web/index.html
+++ b/packages/camera/camera/example/web/index.html
@@ -3,8 +3,9 @@
   Use of this source code is governed by a BSD-style license that can be
   found in the LICENSE file.
 -->
- + +
@@ -16,14 +17,12 @@
- + Camera Web Example - -
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index f00a7e798f0..931f091bbdc 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -4,11 +4,11 @@ description: A Flutter plugin for controlling the camera. Supports previewing
   Dart.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.11.0+1
+version: 0.11.0+2
 
 environment:
-  sdk: ^3.2.3
-  flutter: ">=3.16.6"
+  sdk: ^3.3.0
+  flutter: ">=3.19.0"
 
 flutter:
   plugin:
diff --git a/packages/camera/camera_web/CHANGELOG.md b/packages/camera/camera_web/CHANGELOG.md
index c9e8e661da4..d508877041c 100644
--- a/packages/camera/camera_web/CHANGELOG.md
+++ b/packages/camera/camera_web/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.3.5
+
+* Migrates to `package:web` to support WASM.
+* Updates minimum supported SDK version to Flutter 3.19/Dart 3.3.
+
 ## 0.3.4
 
 * Removes `maxVideoDuration`/`maxDuration`, as the feature was never exposed at
diff --git a/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart b/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart
index 3bf946029c2..3e2c9bd40c9 100644
--- a/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-import 'dart:html';
+import 'dart:js_interop';
 import 'dart:math';
 import 'dart:ui';
 
@@ -13,6 +13,7 @@ import 'package:camera_web/src/types/types.dart';
 import 'package:flutter_test/flutter_test.dart';
 import 'package:integration_test/integration_test.dart';
 import 'package:mocktail/mocktail.dart';
+import 'package:web/web.dart';
 
 import 'helpers/helpers.dart';
 
@@ -22,7 +23,7 @@ void main() {
   const Size videoSize = Size(320, 240);
 
   /// Draw some seconds of random video frames on canvas in realtime.
-  Future<void> simulateCamera(CanvasElement canvasElement) async {
+  Future<void> simulateCamera(HTMLCanvasElement canvasElement) async {
     const int fps = 15;
     const int seconds = 3;
     const int frameDuration = 1000 ~/ fps;
@@ -34,8 +35,10 @@ void main() {
     final int h = videoSize.height ~/ 20;
     for (int y = 0; y < videoSize.height; y += h) {
       for (int x = 0; x < videoSize.width; x += w) {
-        canvasElement.context2D.setFillColorRgb(
-            random.nextInt(255), random.nextInt(255), random.nextInt(255));
+        final int r = random.nextInt(255);
+        final int g = random.nextInt(255);
+        final int b = random.nextInt(255);
+        canvasElement.context2D.fillStyle = 'rgba($r, $g, $b, 1)'.toJS;
         canvasElement.context2D.fillRect(x, y, w, h);
       }
     }
@@ -53,19 +56,25 @@ void main() {
   bool isVideoTypeSupported(String type) => type == supportedVideoType;
 
   Future<void> recordVideo(int videoBitrate) async {
-    final Window window = MockWindow();
-    final Navigator navigator = MockNavigator();
-    final MediaDevices mediaDevices = MockMediaDevices();
+    final MockWindow mockWindow = MockWindow();
+    final MockNavigator mockNavigator = MockNavigator();
+    final MockMediaDevices mockMediaDevices = MockMediaDevices();
 
-    when(() => window.navigator).thenReturn(navigator);
-    when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+    final Window window = createJSInteropWrapper(mockWindow) as Window;
+    final Navigator navigator =
+        createJSInteropWrapper(mockNavigator) as Navigator;
+    final MediaDevices mediaDevices =
+        createJSInteropWrapper(mockMediaDevices) as MediaDevices;
 
-    final CanvasElement canvasElement = CanvasElement(
-      width: videoSize.width.toInt(),
-      height: videoSize.height.toInt(),
-    )..context2D.clearRect(0, 0, videoSize.width, videoSize.height);
+    mockWindow.navigator = navigator;
+    mockNavigator.mediaDevices = mediaDevices;
 
-    final VideoElement videoElement = VideoElement();
+    final HTMLCanvasElement canvasElement = HTMLCanvasElement()
+      ..width = videoSize.width.toInt()
+      ..height = videoSize.height.toInt()
+      ..context2D.clearRect(0, 0, videoSize.width, videoSize.height);
+
+    final HTMLVideoElement videoElement = HTMLVideoElement();
 
     final MockCameraService cameraService = MockCameraService();
 
diff --git a/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
index 6bd86b0fe8e..4adb5cc4046 100644
--- a/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
@@ -2,12 +2,13 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-import 'dart:html';
-
 // ignore: implementation_imports
+import 'dart:js_interop';
+
 import 'package:camera_web/src/types/types.dart';
 import 'package:flutter_test/flutter_test.dart';
 import 'package:integration_test/integration_test.dart';
+import 'package:web/web.dart';
 
 import 'helpers/helpers.dart';
 
@@ -132,7 +133,8 @@ void main() {
     testWidgets('with aborted error code', (WidgetTester tester) async {
       expect(
         CameraErrorCode.fromMediaError(
-          FakeMediaError(MediaError.MEDIA_ERR_ABORTED),
+          createJSInteropWrapper(
+              FakeMediaError(MediaError.MEDIA_ERR_ABORTED)) as MediaError,
         ).toString(),
         equals('mediaErrorAborted'),
       );
@@ -141,7 +143,8 @@ void main() {
     testWidgets('with network error code', (WidgetTester tester) async {
      expect(
         CameraErrorCode.fromMediaError(
-          FakeMediaError(MediaError.MEDIA_ERR_NETWORK),
+          createJSInteropWrapper(
+              FakeMediaError(MediaError.MEDIA_ERR_NETWORK)) as MediaError,
         ).toString(),
         equals('mediaErrorNetwork'),
       );
@@ -150,7 +153,8 @@ void main() {
     testWidgets('with decode error code', (WidgetTester tester) async {
       expect(
         CameraErrorCode.fromMediaError(
-          FakeMediaError(MediaError.MEDIA_ERR_DECODE),
+          createJSInteropWrapper(
+              FakeMediaError(MediaError.MEDIA_ERR_DECODE)) as MediaError,
         ).toString(),
         equals('mediaErrorDecode'),
       );
@@ -160,7 +164,9 @@ void main() {
         (WidgetTester tester) async {
       expect(
         CameraErrorCode.fromMediaError(
-          FakeMediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED),
+          createJSInteropWrapper(
+                  FakeMediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED))
+              as MediaError,
         ).toString(),
         equals('mediaErrorSourceNotSupported'),
       );
@@ -169,7 +175,7 @@ void main() {
     testWidgets('with unknown error code', (WidgetTester tester) async {
       expect(
         CameraErrorCode.fromMediaError(
-          FakeMediaError(5),
+          createJSInteropWrapper(FakeMediaError(5)) as MediaError,
         ).toString(),
         equals('mediaErrorUnknown'),
       );
diff --git a/packages/camera/camera_web/example/integration_test/camera_options_test.dart b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
index 7dd25e37556..66f4a6bd1eb 100644
--- a/packages/camera/camera_web/example/integration_test/camera_options_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
@@ -3,6 +3,8 @@
 // found in the LICENSE file.
 
// ignore: implementation_imports +import 'dart:js_interop'; + import 'package:camera_web/src/types/types.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; @@ -20,10 +22,10 @@ void main() { ); expect( - cameraOptions.toJson(), + cameraOptions.toMediaStreamConstraints().dartify(), equals({ - 'audio': cameraOptions.audio.toJson(), - 'video': cameraOptions.video.toJson(), + 'audio': cameraOptions.audio.toMediaStreamConstraints().dartify()!, + 'video': cameraOptions.video.toMediaStreamConstraints().dartify()!, }), ); }); @@ -61,8 +63,8 @@ void main() { group('AudioConstraints', () { testWidgets('serializes correctly', (WidgetTester tester) async { expect( - const AudioConstraints(enabled: true).toJson(), - equals(true), + const AudioConstraints(enabled: true).toMediaStreamConstraints(), + true.toJS, ); }); @@ -84,7 +86,7 @@ void main() { ); expect( - videoConstraints.toJson(), + videoConstraints.toMediaStreamConstraints().dartify(), equals({ 'facingMode': videoConstraints.facingMode!.toJson(), 'width': videoConstraints.width!.toJson(), diff --git a/packages/camera/camera_web/example/integration_test/camera_service_test.dart b/packages/camera/camera_web/example/integration_test/camera_service_test.dart index fb2279a0942..2ed0c54e633 100644 --- a/packages/camera/camera_web/example/integration_test/camera_service_test.dart +++ b/packages/camera/camera_web/example/integration_test/camera_service_test.dart @@ -2,8 +2,10 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import 'dart:html'; -import 'dart:js_util' as js_util; +// ignore_for_file: only_throw_errors + +import 'dart:js_interop'; +import 'dart:js_interop_unsafe'; import 'package:camera_platform_interface/camera_platform_interface.dart'; // ignore_for_file: implementation_imports @@ -15,6 +17,7 @@ import 'package:flutter/services.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; import 'package:mocktail/mocktail.dart'; +import 'package:web/web.dart' as web; import 'helpers/helpers.dart'; @@ -24,27 +27,40 @@ void main() { group('CameraService', () { const int cameraId = 1; - late Window window; - late Navigator navigator; - late MediaDevices mediaDevices; - late CameraService cameraService; + late MockWindow mockWindow; + late MockNavigator mockNavigator; + late MockMediaDevices mockMediaDevices; + + late web.Window window; + late web.Navigator navigator; + late web.MediaDevices mediaDevices; + late JsUtil jsUtil; + late CameraService cameraService; + setUp(() async { - window = MockWindow(); - navigator = MockNavigator(); - mediaDevices = MockMediaDevices(); + mockWindow = MockWindow(); + mockNavigator = MockNavigator(); + mockMediaDevices = MockMediaDevices(); + + window = createJSInteropWrapper(mockWindow) as web.Window; + navigator = createJSInteropWrapper(mockNavigator) as web.Navigator; + mediaDevices = + createJSInteropWrapper(mockMediaDevices) as web.MediaDevices; + + mockWindow.navigator = navigator; + mockNavigator.mediaDevices = mediaDevices; + jsUtil = MockJsUtil(); - when(() => window.navigator).thenReturn(navigator); - when(() => navigator.mediaDevices).thenReturn(mediaDevices); + registerFallbackValue(createJSInteropWrapper(MockWindow())); // Mock JsUtil to return the real getProperty from dart:js_util. 
when(() => jsUtil.getProperty(any(), any())).thenAnswer( - (Invocation invocation) => js_util.getProperty( - invocation.positionalArguments[0] as Object, - invocation.positionalArguments[1] as Object, - ), + (Invocation invocation) => + (invocation.positionalArguments[0] as JSObject) + .getProperty(invocation.positionalArguments[1] as JSAny), ); cameraService = CameraService()..window = window; @@ -54,8 +70,15 @@ void main() { testWidgets( 'calls MediaDevices.getUserMedia ' 'with provided options', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenAnswer((_) async => FakeMediaStream([])); + late final web.MediaStreamConstraints? capturedConstraints; + mockMediaDevices.getUserMedia = + ([web.MediaStreamConstraints? constraints]) { + capturedConstraints = constraints; + final web.MediaStream stream = + createJSInteropWrapper(FakeMediaStream([])) + as web.MediaStream; + return Future.value(stream).toJS; + }.toJS; final CameraOptions options = CameraOptions( video: VideoConstraints( @@ -66,26 +89,13 @@ void main() { await cameraService.getMediaStreamForOptions(options); - verify( - () => mediaDevices.getUserMedia(options.toJson()), - ).called(1); - }); - - testWidgets( - 'throws PlatformException ' - 'with notSupported error ' - 'when there are no media devices', (WidgetTester tester) async { - when(() => navigator.mediaDevices).thenReturn(null); - expect( - () => cameraService.getMediaStreamForOptions(const CameraOptions()), - throwsA( - isA().having( - (PlatformException e) => e.code, - 'code', - CameraErrorCode.notSupported.toString(), - ), - ), + capturedConstraints?.video.dartify(), + equals(options.video.toMediaStreamConstraints().dartify()), + ); + expect( + capturedConstraints?.audio.dartify(), + equals(options.audio.toMediaStreamConstraints().dartify()), ); }); @@ -94,8 +104,11 @@ void main() { 'with notFound error ' 'when MediaDevices.getUserMedia throws DomException ' 'with NotFoundError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('NotFoundError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'NotFoundError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -116,8 +129,11 @@ void main() { 'with notFound error ' 'when MediaDevices.getUserMedia throws DomException ' 'with DevicesNotFoundError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('DevicesNotFoundError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'DevicesNotFoundError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -138,9 +154,11 @@ void main() { 'with notReadable error ' 'when MediaDevices.getUserMedia throws DomException ' 'with NotReadableError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('NotReadableError')); - + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? 
_]) { + throw web.DOMException('', 'NotReadableError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( const CameraOptions(), @@ -160,8 +178,11 @@ void main() { 'with notReadable error ' 'when MediaDevices.getUserMedia throws DomException ' 'with TrackStartError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('TrackStartError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'TrackStartError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -182,8 +203,11 @@ void main() { 'with overconstrained error ' 'when MediaDevices.getUserMedia throws DomException ' 'with OverconstrainedError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('OverconstrainedError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'OverconstrainedError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -204,8 +228,11 @@ void main() { 'with overconstrained error ' 'when MediaDevices.getUserMedia throws DomException ' 'with ConstraintNotSatisfiedError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('ConstraintNotSatisfiedError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'ConstraintNotSatisfiedError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -226,8 +253,11 @@ void main() { 'with permissionDenied error ' 'when MediaDevices.getUserMedia throws DomException ' 'with NotAllowedError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('NotAllowedError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'NotAllowedError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -248,8 +278,11 @@ void main() { 'with permissionDenied error ' 'when MediaDevices.getUserMedia throws DomException ' 'with PermissionDeniedError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('PermissionDeniedError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'PermissionDeniedError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -270,8 +303,11 @@ void main() { 'with type error ' 'when MediaDevices.getUserMedia throws DomException ' 'with TypeError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('TypeError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? 
_]) { + throw web.DOMException('', 'TypeError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -292,8 +328,11 @@ void main() { 'with abort error ' 'when MediaDevices.getUserMedia throws DomException ' 'with AbortError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('AbortError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'AbortError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -314,8 +353,11 @@ void main() { 'with security error ' 'when MediaDevices.getUserMedia throws DomException ' 'with SecurityError', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('SecurityError')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'SecurityError'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -336,8 +378,11 @@ void main() { 'with unknown error ' 'when MediaDevices.getUserMedia throws DomException ' 'with an unknown error', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())) - .thenThrow(FakeDomException('Unknown')); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) { + throw web.DOMException('', 'Unknown'); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -358,7 +403,11 @@ void main() { 'with unknown error ' 'when MediaDevices.getUserMedia throws an unknown exception', (WidgetTester tester) async { - when(() => mediaDevices.getUserMedia(any())).thenThrow(Exception()); + mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? 
_]) { + throw Exception(); + // ignore: dead_code + return Future.value(web.MediaStream()).toJS; + }.toJS; expect( () => cameraService.getMediaStreamForOptions( @@ -379,17 +428,23 @@ void main() { group('getZoomLevelCapabilityForCamera', () { late Camera camera; - late List videoTracks; + late MockMediaStreamTrack mockVideoTrack; + late List videoTracks; setUp(() { camera = MockCamera(); - videoTracks = [ - MockMediaStreamTrack(), - MockMediaStreamTrack() + mockVideoTrack = MockMediaStreamTrack(); + videoTracks = [ + createJSInteropWrapper(mockVideoTrack) as web.MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) + as web.MediaStreamTrack, ]; when(() => camera.textureId).thenReturn(0); - when(() => camera.stream).thenReturn(FakeMediaStream(videoTracks)); + when(() => camera.stream).thenReturn( + createJSInteropWrapper(FakeMediaStream(videoTracks)) + as web.MediaStream, + ); cameraService.jsUtil = jsUtil; }); @@ -397,18 +452,15 @@ void main() { testWidgets( 'returns the zoom level capability ' 'based on the first video track', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'zoom': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return web.MediaTrackSupportedConstraints(zoom: true); + }.toJS; - when(videoTracks.first.getCapabilities).thenReturn({ - 'zoom': js_util.jsify({ - 'min': 100, - 'max': 400, - 'step': 2, - }), - }); + mockVideoTrack.getCapabilities = () { + return web.MediaTrackCapabilities( + zoom: web.MediaSettingsRange(min: 100, max: 400, step: 2), + ); + }.toJS; final ZoomLevelCapability zoomLevelCapability = cameraService.getZoomLevelCapabilityForCamera(camera); @@ -419,75 +471,19 @@ void main() { }); group('throws CameraWebException', () { - testWidgets( - 'with zoomLevelNotSupported error ' - 'when there are no media devices', (WidgetTester tester) async { - when(() => navigator.mediaDevices).thenReturn(null); - - expect( - () => cameraService.getZoomLevelCapabilityForCamera(camera), - throwsA( - isA() - .having( - (CameraWebException e) => e.cameraId, - 'cameraId', - camera.textureId, - ) - .having( - (CameraWebException e) => e.code, - 'code', - CameraErrorCode.zoomLevelNotSupported, - ), - ), - ); - }); - testWidgets( 'with zoomLevelNotSupported error ' 'when the zoom level is not supported ' 'in the browser', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'zoom': false, - }); - - when(videoTracks.first.getCapabilities).thenReturn({ - 'zoom': { - 'min': 100, - 'max': 400, - 'step': 2, - }, - }); - - expect( - () => cameraService.getZoomLevelCapabilityForCamera(camera), - throwsA( - isA() - .having( - (CameraWebException e) => e.cameraId, - 'cameraId', - camera.textureId, - ) - .having( - (CameraWebException e) => e.code, - 'code', - CameraErrorCode.zoomLevelNotSupported, - ), - ), - ); - }); - - testWidgets( - 'with zoomLevelNotSupported error ' - 'when the zoom level is not supported ' - 'by the camera', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'zoom': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return web.MediaTrackSupportedConstraints(zoom: false); + }.toJS; - when(videoTracks.first.getCapabilities) - .thenReturn({}); + mockVideoTrack.getCapabilities = () { + return web.MediaTrackCapabilities( + zoom: web.MediaSettingsRange(min: 100, max: 400, step: 2), + ); + }.toJS; expect( () => cameraService.getZoomLevelCapabilityForCamera(camera), @@ -511,14 +507,15 @@ void main() { 'with 
notStarted error ' 'when the camera stream has not been initialized', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'zoom': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return web.MediaTrackSupportedConstraints(zoom: true); + }.toJS; // Create a camera stream with no video tracks. - when(() => camera.stream) - .thenReturn(FakeMediaStream([])); + when(() => camera.stream).thenReturn( + createJSInteropWrapper(FakeMediaStream([])) + as web.MediaStream, + ); expect( () => cameraService.getZoomLevelCapabilityForCamera(camera), @@ -545,59 +542,44 @@ void main() { cameraService.jsUtil = jsUtil; }); - testWidgets( - 'throws PlatformException ' - 'with notSupported error ' - 'when there are no media devices', (WidgetTester tester) async { - when(() => navigator.mediaDevices).thenReturn(null); - - expect( - () => - cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack()), - throwsA( - isA().having( - (PlatformException e) => e.code, - 'code', - CameraErrorCode.notSupported.toString(), - ), - ), - ); - }); - testWidgets( 'returns null ' 'when the facing mode is not supported', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'facingMode': false, - }); + mockMediaDevices.getSupportedConstraints = () { + return web.MediaTrackSupportedConstraints(facingMode: false); + }.toJS; - final String? facingMode = - cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack()); + final String? facingMode = cameraService.getFacingModeForVideoTrack( + createJSInteropWrapper(MockMediaStreamTrack()) + as web.MediaStreamTrack, + ); expect(facingMode, isNull); }); group('when the facing mode is supported', () { - late MediaStreamTrack videoTrack; + late MockMediaStreamTrack mockVideoTrack; + late web.MediaStreamTrack videoTrack; setUp(() { - videoTrack = MockMediaStreamTrack(); + mockVideoTrack = MockMediaStreamTrack(); + videoTrack = + createJSInteropWrapper(mockVideoTrack) as web.MediaStreamTrack; - when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities')) + when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'.toJS)) .thenReturn(true); - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'facingMode': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return web.MediaTrackSupportedConstraints(facingMode: true); + }.toJS; }); testWidgets( 'returns an appropriate facing mode ' 'based on the video track settings', (WidgetTester tester) async { - when(videoTrack.getSettings) - .thenReturn({'facingMode': 'user'}); + mockVideoTrack.getSettings = () { + return web.MediaTrackSettings(facingMode: 'user'); + }.toJS; final String? facingMode = cameraService.getFacingModeForVideoTrack(videoTrack); @@ -610,12 +592,16 @@ void main() { 'based on the video track capabilities ' 'when the facing mode setting is empty', (WidgetTester tester) async { - when(videoTrack.getSettings).thenReturn({}); - when(videoTrack.getCapabilities).thenReturn({ - 'facingMode': ['environment', 'left'] - }); - - when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities')) + mockVideoTrack.getSettings = () { + return web.MediaTrackSettings(facingMode: ''); + }.toJS; + mockVideoTrack.getCapabilities = () { + return web.MediaTrackCapabilities( + facingMode: ['environment'.toJS, 'left'.toJS].toJS, + ); + }.toJS; + + when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'.toJS)) .thenReturn(true); final String? 
facingMode = @@ -628,9 +614,12 @@ void main() { 'returns null ' 'when the facing mode setting ' 'and capabilities are empty', (WidgetTester tester) async { - when(videoTrack.getSettings).thenReturn({}); - when(videoTrack.getCapabilities) - .thenReturn({'facingMode': []}); + mockVideoTrack.getSettings = () { + return web.MediaTrackSettings(facingMode: ''); + }.toJS; + mockVideoTrack.getCapabilities = () { + return web.MediaTrackCapabilities(facingMode: [].toJS); + }.toJS; final String? facingMode = cameraService.getFacingModeForVideoTrack(videoTrack); @@ -643,9 +632,11 @@ void main() { 'when the facing mode setting is empty and ' 'the video track capabilities are not supported', (WidgetTester tester) async { - when(videoTrack.getSettings).thenReturn({}); + mockVideoTrack.getSettings = () { + return web.MediaTrackSettings(facingMode: ''); + }.toJS; - when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities')) + when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'.toJS)) .thenReturn(false); final String? facingMode = diff --git a/packages/camera/camera_web/example/integration_test/camera_test.dart b/packages/camera/camera_web/example/integration_test/camera_test.dart index 7611e288b84..e953a06b0e8 100644 --- a/packages/camera/camera_web/example/integration_test/camera_test.dart +++ b/packages/camera/camera_web/example/integration_test/camera_test.dart @@ -3,7 +3,8 @@ // found in the LICENSE file. import 'dart:async'; -import 'dart:html'; +import 'dart:js_interop'; +import 'dart:js_interop_unsafe'; import 'dart:ui'; import 'package:async/async.dart'; @@ -15,6 +16,7 @@ import 'package:camera_web/src/types/types.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; import 'package:mocktail/mocktail.dart'; +import 'package:web/web.dart'; import 'helpers/helpers.dart'; @@ -24,6 +26,10 @@ void main() { group('Camera', () { const int textureId = 1; + late MockWindow mockWindow; + late MockNavigator mockNavigator; + late MockMediaDevices mockMediaDevices; + late Window window; late Navigator navigator; late MediaDevices mediaDevices; @@ -32,16 +38,20 @@ void main() { late CameraService cameraService; setUp(() { - window = MockWindow(); - navigator = MockNavigator(); - mediaDevices = MockMediaDevices(); + mockWindow = MockWindow(); + mockNavigator = MockNavigator(); + mockMediaDevices = MockMediaDevices(); + + window = createJSInteropWrapper(mockWindow) as Window; + navigator = createJSInteropWrapper(mockNavigator) as Navigator; + mediaDevices = createJSInteropWrapper(mockMediaDevices) as MediaDevices; - when(() => window.navigator).thenReturn(navigator); - when(() => navigator.mediaDevices).thenReturn(mediaDevices); + mockWindow.navigator = navigator; + mockNavigator.mediaDevices = mediaDevices; cameraService = MockCameraService(); - final VideoElement videoElement = + final HTMLVideoElement videoElement = getVideoElementWithBlankStream(const Size(10, 10)); mediaStream = videoElement.captureStream(); @@ -110,7 +120,8 @@ void main() { expect(camera.videoElement.autoplay, isFalse); expect(camera.videoElement.muted, isTrue); expect(camera.videoElement.srcObject, mediaStream); - expect(camera.videoElement.attributes.keys, contains('playsinline')); + expect(camera.videoElement.attributes.getNamedItem('playsinline'), + isNotNull); expect( camera.videoElement.style.transformOrigin, equals('center center')); @@ -154,7 +165,10 @@ void main() { expect(camera.divElement, isNotNull); expect(camera.divElement.style.objectFit, equals('cover')); - 
expect(camera.divElement.children, contains(camera.videoElement)); + final JSArray? array = (globalContext['Array']! as JSObject) + .callMethod('from'.toJS, camera.divElement.children) + as JSArray?; + expect(array?.toDart, contains(camera.videoElement)); }); testWidgets('initializes the camera stream', (WidgetTester tester) async { @@ -303,26 +317,26 @@ void main() { group( 'enables the torch mode ' 'when taking a picture', () { + late MockMediaStreamTrack mockVideoTrack; late List videoTracks; late MediaStream videoStream; - late VideoElement videoElement; + late HTMLVideoElement videoElement; setUp(() { + mockVideoTrack = MockMediaStreamTrack(); videoTracks = [ - MockMediaStreamTrack(), - MockMediaStreamTrack() + createJSInteropWrapper(mockVideoTrack) as MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) as MediaStreamTrack, ]; - videoStream = FakeMediaStream(videoTracks); + videoStream = createJSInteropWrapper(FakeMediaStream(videoTracks)) + as MediaStream; videoElement = getVideoElementWithBlankStream(const Size(100, 100)) ..muted = true; - when(() => videoTracks.first.applyConstraints(any())) - .thenAnswer((_) async => {}); - - when(videoTracks.first.getCapabilities).thenReturn({ - 'torch': true, - }); + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(torch: [true.toJS].toJS); + }.toJS; }); testWidgets('if the flash mode is auto', (WidgetTester tester) async { @@ -337,27 +351,22 @@ void main() { await camera.play(); + final List capturedConstraints = + []; + mockVideoTrack.applyConstraints = ([ + MediaTrackConstraints? constraints, + ]) { + if (constraints != null) { + capturedConstraints.add(constraints); + } + return Future.value().toJS; + }.toJS; + final XFile _ = await camera.takePicture(); - verify( - () => videoTracks.first.applyConstraints({ - 'advanced': [ - { - 'torch': true, - } - ] - }), - ).called(1); - - verify( - () => videoTracks.first.applyConstraints({ - 'advanced': [ - { - 'torch': false, - } - ] - }), - ).called(1); + expect(capturedConstraints.length, 2); + expect(capturedConstraints[0].torch.dartify(), true); + expect(capturedConstraints[1].torch.dartify(), false); }); testWidgets('if the flash mode is always', (WidgetTester tester) async { @@ -372,27 +381,22 @@ void main() { await camera.play(); + final List capturedConstraints = + []; + mockVideoTrack.applyConstraints = ([ + MediaTrackConstraints? constraints, + ]) { + if (constraints != null) { + capturedConstraints.add(constraints); + } + return Future.value().toJS; + }.toJS; + final XFile _ = await camera.takePicture(); - verify( - () => videoTracks.first.applyConstraints({ - 'advanced': [ - { - 'torch': true, - } - ] - }), - ).called(1); - - verify( - () => videoTracks.first.applyConstraints({ - 'advanced': [ - { - 'torch': false, - } - ] - }), - ).called(1); + expect(capturedConstraints.length, 2); + expect(capturedConstraints[0].torch.dartify(), true); + expect(capturedConstraints[1].torch.dartify(), false); }); }); }); @@ -404,7 +408,7 @@ void main() { (WidgetTester tester) async { const Size videoSize = Size(1280, 720); - final VideoElement videoElement = + final HTMLVideoElement videoElement = getVideoElementWithBlankStream(videoSize); mediaStream = videoElement.captureStream(); @@ -425,7 +429,7 @@ void main() { 'returns Size.zero ' 'if the camera is missing video tracks', (WidgetTester tester) async { // Create a video stream with no video tracks. 
- final VideoElement videoElement = VideoElement(); + final HTMLVideoElement videoElement = HTMLVideoElement(); mediaStream = videoElement.captureStream(); final Camera camera = Camera( @@ -443,32 +447,38 @@ void main() { }); group('setFlashMode', () { + late MockMediaStreamTrack mockVideoTrack; late List videoTracks; late MediaStream videoStream; setUp(() { + mockVideoTrack = MockMediaStreamTrack(); videoTracks = [ - MockMediaStreamTrack(), - MockMediaStreamTrack() + createJSInteropWrapper(mockVideoTrack) as MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) as MediaStreamTrack, ]; - videoStream = FakeMediaStream(videoTracks); - - when(() => videoTracks.first.applyConstraints(any())) - .thenAnswer((_) async => {}); - - when(videoTracks.first.getCapabilities) - .thenReturn({}); + videoStream = + createJSInteropWrapper(FakeMediaStream(videoTracks)) as MediaStream; + + mockVideoTrack.applyConstraints = ([ + MediaTrackConstraints? constraints, + ]) { + return Future.value().toJS; + }.toJS; + + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(); + }.toJS; }); testWidgets('sets the camera flash mode', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'torch': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return MediaTrackSupportedConstraints(torch: true); + }.toJS; - when(videoTracks.first.getCapabilities).thenReturn({ - 'torch': true, - }); + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(torch: [true.toJS].toJS); + }.toJS; final Camera camera = Camera( textureId: textureId, @@ -490,14 +500,13 @@ void main() { testWidgets( 'enables the torch mode ' 'if the flash mode is torch', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'torch': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return MediaTrackSupportedConstraints(torch: true); + }.toJS; - when(videoTracks.first.getCapabilities).thenReturn({ - 'torch': true, - }); + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(torch: [true.toJS].toJS); + }.toJS; final Camera camera = Camera( textureId: textureId, @@ -506,30 +515,33 @@ void main() { ..window = window ..stream = videoStream; + final List capturedConstraints = + []; + mockVideoTrack.applyConstraints = ([ + MediaTrackConstraints? constraints, + ]) { + if (constraints != null) { + capturedConstraints.add(constraints); + } + return Future.value().toJS; + }.toJS; + camera.setFlashMode(FlashMode.torch); - verify( - () => videoTracks.first.applyConstraints({ - 'advanced': [ - { - 'torch': true, - } - ] - }), - ).called(1); + expect(capturedConstraints.length, 1); + expect(capturedConstraints[0].torch.dartify(), true); }); testWidgets( 'disables the torch mode ' 'if the flash mode is not torch', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'torch': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return MediaTrackSupportedConstraints(torch: true); + }.toJS; - when(videoTracks.first.getCapabilities).thenReturn({ - 'torch': true, - }); + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(torch: [true.toJS].toJS); + }.toJS; final Camera camera = Camera( textureId: textureId, @@ -538,62 +550,35 @@ void main() { ..window = window ..stream = videoStream; + final List capturedConstraints = + []; + mockVideoTrack.applyConstraints = ([ + MediaTrackConstraints? 
constraints, + ]) { + if (constraints != null) { + capturedConstraints.add(constraints); + } + return Future.value().toJS; + }.toJS; + camera.setFlashMode(FlashMode.auto); - verify( - () => videoTracks.first.applyConstraints({ - 'advanced': [ - { - 'torch': false, - } - ] - }), - ).called(1); + expect(capturedConstraints.length, 1); + expect(capturedConstraints[0].torch.dartify(), false); }); group('throws a CameraWebException', () { - testWidgets( - 'with torchModeNotSupported error ' - 'when there are no media devices', (WidgetTester tester) async { - when(() => navigator.mediaDevices).thenReturn(null); - - final Camera camera = Camera( - textureId: textureId, - cameraService: cameraService, - ) - ..window = window - ..stream = videoStream; - - expect( - () => camera.setFlashMode(FlashMode.always), - throwsA( - isA() - .having( - (CameraWebException e) => e.cameraId, - 'cameraId', - textureId, - ) - .having( - (CameraWebException e) => e.code, - 'code', - CameraErrorCode.torchModeNotSupported, - ), - ), - ); - }); - testWidgets( 'with torchModeNotSupported error ' 'when the torch mode is not supported ' 'in the browser', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'torch': false, - }); + mockMediaDevices.getSupportedConstraints = () { + return MediaTrackSupportedConstraints(torch: false); + }.toJS; - when(videoTracks.first.getCapabilities).thenReturn({ - 'torch': true, - }); + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(torch: [true.toJS].toJS); + }.toJS; final Camera camera = Camera( textureId: textureId, @@ -624,14 +609,13 @@ void main() { 'with torchModeNotSupported error ' 'when the torch mode is not supported ' 'by the camera', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'torch': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return MediaTrackSupportedConstraints(torch: true); + }.toJS; - when(videoTracks.first.getCapabilities).thenReturn({ - 'torch': false, - }); + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(torch: [false.toJS].toJS); + }.toJS; final Camera camera = Camera( textureId: textureId, @@ -662,14 +646,13 @@ void main() { 'with notStarted error ' 'when the camera stream has not been initialized', (WidgetTester tester) async { - when(mediaDevices.getSupportedConstraints) - .thenReturn({ - 'torch': true, - }); + mockMediaDevices.getSupportedConstraints = () { + return MediaTrackSupportedConstraints(torch: true); + }.toJS; - when(videoTracks.first.getCapabilities).thenReturn({ - 'torch': true, - }); + mockVideoTrack.getCapabilities = () { + return MediaTrackCapabilities(torch: [true.toJS].toJS); + }.toJS; final Camera camera = Camera( textureId: textureId, @@ -710,7 +693,8 @@ void main() { final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability( minimum: 50.0, maximum: 100.0, - videoTrack: MockMediaStreamTrack(), + videoTrack: createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, ); when(() => cameraService.getZoomLevelCapabilityForCamera(camera)) @@ -741,7 +725,8 @@ void main() { final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability( minimum: 50.0, maximum: 100.0, - videoTrack: MockMediaStreamTrack(), + videoTrack: createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, ); when(() => cameraService.getZoomLevelCapabilityForCamera(camera)) @@ -769,7 +754,9 @@ void main() { cameraService: cameraService, ); - final MockMediaStreamTrack videoTrack = 
MockMediaStreamTrack(); + final MockMediaStreamTrack mockVideoTrack = MockMediaStreamTrack(); + final MediaStreamTrack videoTrack = + createJSInteropWrapper(mockVideoTrack) as MediaStreamTrack; final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability( minimum: 50.0, @@ -777,8 +764,16 @@ void main() { videoTrack: videoTrack, ); - when(() => videoTrack.applyConstraints(any())) - .thenAnswer((_) async {}); + final List capturedConstraints = + []; + mockVideoTrack.applyConstraints = ([ + MediaTrackConstraints? constraints, + ]) { + if (constraints != null) { + capturedConstraints.add(constraints); + } + return Future.value().toJS; + }.toJS; when(() => cameraService.getZoomLevelCapabilityForCamera(camera)) .thenReturn(zoomLevelCapability); @@ -787,15 +782,8 @@ void main() { camera.setZoomLevel(zoom); - verify( - () => videoTrack.applyConstraints({ - 'advanced': [ - { - ZoomLevelCapability.constraintName: zoom, - } - ] - }), - ).called(1); + expect(capturedConstraints.length, 1); + expect(capturedConstraints[0].zoom.dartify(), zoom); }); group('throws a CameraWebException', () { @@ -811,7 +799,8 @@ void main() { final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability( minimum: 50.0, maximum: 100.0, - videoTrack: MockMediaStreamTrack(), + videoTrack: createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, ); when(() => cameraService.getZoomLevelCapabilityForCamera(camera)) @@ -846,7 +835,8 @@ void main() { final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability( minimum: 50.0, maximum: 100.0, - videoTrack: MockMediaStreamTrack(), + videoTrack: createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, ); when(() => cameraService.getZoomLevelCapabilityForCamera(camera)) @@ -878,7 +868,9 @@ void main() { 'returns a lens direction ' 'based on the first video track settings', (WidgetTester tester) async { - final MockVideoElement videoElement = MockVideoElement(); + final MockVideoElement mockVideoElement = MockVideoElement(); + final HTMLVideoElement videoElement = + createJSInteropWrapper(mockVideoElement) as HTMLVideoElement; final Camera camera = Camera( textureId: textureId, @@ -887,15 +879,19 @@ void main() { final MockMediaStreamTrack firstVideoTrack = MockMediaStreamTrack(); - when(() => videoElement.srcObject).thenReturn( - FakeMediaStream([ - firstVideoTrack, - MockMediaStreamTrack(), - ]), - ); + mockVideoElement.srcObject = createJSInteropWrapper( + FakeMediaStream( + [ + createJSInteropWrapper(firstVideoTrack) as MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + ], + ), + ) as MediaStream; - when(firstVideoTrack.getSettings) - .thenReturn({'facingMode': 'environment'}); + firstVideoTrack.getSettings = () { + return MediaTrackSettings(facingMode: 'environment'); + }.toJS; when(() => cameraService.mapFacingModeToLensDirection('environment')) .thenReturn(CameraLensDirection.external); @@ -910,7 +906,9 @@ void main() { 'returns null ' 'if the first video track is missing the facing mode', (WidgetTester tester) async { - final MockVideoElement videoElement = MockVideoElement(); + final MockVideoElement mockVideoElement = MockVideoElement(); + final HTMLVideoElement videoElement = + createJSInteropWrapper(mockVideoElement) as HTMLVideoElement; final Camera camera = Camera( textureId: textureId, @@ -919,14 +917,19 @@ void main() { final MockMediaStreamTrack firstVideoTrack = MockMediaStreamTrack(); - when(() => videoElement.srcObject).thenReturn( - FakeMediaStream([ - firstVideoTrack, - 
MockMediaStreamTrack(), - ]), - ); + videoElement.srcObject = createJSInteropWrapper( + FakeMediaStream( + [ + createJSInteropWrapper(firstVideoTrack) as MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + ], + ), + ) as MediaStream; - when(firstVideoTrack.getSettings).thenReturn({}); + firstVideoTrack.getSettings = () { + return MediaTrackSettings(); + }.toJS; expect( camera.getLensDirection(), @@ -938,7 +941,7 @@ void main() { 'returns null ' 'if the camera is missing video tracks', (WidgetTester tester) async { // Create a video stream with no video tracks. - final VideoElement videoElement = VideoElement(); + final HTMLVideoElement videoElement = HTMLVideoElement(); mediaStream = videoElement.captureStream(); final Camera camera = Camera( @@ -974,15 +977,15 @@ void main() { group('video recording', () { const String supportedVideoType = 'video/webm'; + late MockMediaRecorder mockMediaRecorder; late MediaRecorder mediaRecorder; bool isVideoTypeSupported(String type) => type == supportedVideoType; setUp(() { - mediaRecorder = MockMediaRecorder(); - - when(() => mediaRecorder.onError) - .thenAnswer((_) => const Stream.empty()); + mockMediaRecorder = MockMediaRecorder(); + mediaRecorder = + createJSInteropWrapper(mockMediaRecorder) as MediaRecorder; }); group('startVideoRecording', () { @@ -1027,11 +1030,21 @@ void main() { await camera.initialize(); await camera.play(); + final List capturedEvents = []; + mockMediaRecorder.addEventListener = ( + String type, + EventListener? callback, [ + JSAny? options, + ]) { + capturedEvents.add(type); + }.toJS; + await camera.startVideoRecording(); - verify( - () => mediaRecorder.addEventListener('dataavailable', any()), - ).called(1); + expect( + capturedEvents.where((String e) => e == 'dataavailable').length, + 1, + ); }); testWidgets('listens to the media recorder stop events', @@ -1046,11 +1059,21 @@ void main() { await camera.initialize(); await camera.play(); + final List capturedEvents = []; + mockMediaRecorder.addEventListener = ( + String type, + EventListener? callback, [ + JSAny? options, + ]) { + capturedEvents.add(type); + }.toJS; + await camera.startVideoRecording(); - verify( - () => mediaRecorder.addEventListener('stop', any()), - ).called(1); + expect( + capturedEvents.where((String e) => e == 'stop').length, + 1, + ); }); testWidgets('starts a video recording', (WidgetTester tester) async { @@ -1064,9 +1087,14 @@ void main() { await camera.initialize(); await camera.play(); + final List capturedStarts = []; + mockMediaRecorder.start = ([int? 
timeslice]) { + capturedStarts.add(timeslice); + }.toJS; + await camera.startVideoRecording(); - verify(mediaRecorder.start).called(1); + expect(capturedStarts.length, 1); }); group('throws a CameraWebException', () { @@ -1108,9 +1136,14 @@ void main() { cameraService: cameraService, )..mediaRecorder = mediaRecorder; + int pauses = 0; + mockMediaRecorder.pause = () { + pauses++; + }.toJS; + await camera.pauseVideoRecording(); - verify(mediaRecorder.pause).called(1); + expect(pauses, 1); }); testWidgets( @@ -1149,9 +1182,14 @@ void main() { cameraService: cameraService, )..mediaRecorder = mediaRecorder; + int resumes = 0; + mockMediaRecorder.resume = () { + resumes++; + }.toJS; + await camera.resumeVideoRecording(); - verify(mediaRecorder.resume).called(1); + expect(resumes, 1); }); testWidgets( @@ -1198,50 +1236,62 @@ void main() { await camera.initialize(); await camera.play(); - late void Function(Event) videoDataAvailableListener; - late void Function(Event) videoRecordingStoppedListener; - - when( - () => mediaRecorder.addEventListener('dataavailable', any()), - ).thenAnswer((Invocation invocation) { - videoDataAvailableListener = - invocation.positionalArguments[1] as void Function(Event); - }); - - when( - () => mediaRecorder.addEventListener('stop', any()), - ).thenAnswer((Invocation invocation) { - videoRecordingStoppedListener = - invocation.positionalArguments[1] as void Function(Event); - }); + late EventListener videoDataAvailableListener; + late EventListener videoRecordingStoppedListener; + + mockMediaRecorder.addEventListener = ( + String type, + EventListener? callback, [ + JSAny? options, + ]) { + if (type == 'dataavailable') { + videoDataAvailableListener = callback!; + } else if (type == 'stop') { + videoRecordingStoppedListener = callback!; + } + }.toJS; Blob? finalVideo; List? 
videoParts; camera.blobBuilder = (List blobs, String videoType) { videoParts = [...blobs]; - finalVideo = Blob(blobs, videoType); + finalVideo = Blob(blobs.toJS, BlobPropertyBag(type: videoType)); return finalVideo!; }; await camera.startVideoRecording(); + + int stops = 0; + mockMediaRecorder.stop = () { + stops++; + }.toJS; + final Future videoFileFuture = camera.stopVideoRecording(); - final Blob capturedVideoPartOne = Blob([]); - final Blob capturedVideoPartTwo = Blob([]); + final Blob capturedVideoPartOne = Blob([].toJS); + final Blob capturedVideoPartTwo = Blob([].toJS); final List capturedVideoParts = [ capturedVideoPartOne, capturedVideoPartTwo, ]; - videoDataAvailableListener(FakeBlobEvent(capturedVideoPartOne)); - videoDataAvailableListener(FakeBlobEvent(capturedVideoPartTwo)); + videoDataAvailableListener.callAsFunction( + null, + createJSInteropWrapper(FakeBlobEvent(capturedVideoPartOne)) + as BlobEvent, + ); + videoDataAvailableListener.callAsFunction( + null, + createJSInteropWrapper(FakeBlobEvent(capturedVideoPartTwo)) + as BlobEvent, + ); - videoRecordingStoppedListener(Event('stop')); + videoRecordingStoppedListener.callAsFunction(null, Event('stop')); final XFile videoFile = await videoFileFuture; - verify(mediaRecorder.stop).called(1); + expect(stops, 1); expect( videoFile, @@ -1294,15 +1344,18 @@ void main() { }); group('on video recording stopped', () { - late void Function(Event) videoRecordingStoppedListener; + late EventListener videoRecordingStoppedListener; setUp(() { - when( - () => mediaRecorder.addEventListener('stop', any()), - ).thenAnswer((Invocation invocation) { - videoRecordingStoppedListener = - invocation.positionalArguments[1] as void Function(Event); - }); + mockMediaRecorder.addEventListener = ( + String type, + EventListener? callback, [ + JSAny? options, + ]) { + if (type == 'stop') { + videoRecordingStoppedListener = callback!; + } + }.toJS; }); testWidgets('stops listening to the media recorder data events', @@ -1319,13 +1372,23 @@ void main() { await camera.startVideoRecording(); - videoRecordingStoppedListener(Event('stop')); + final List capturedEvents = []; + mockMediaRecorder.removeEventListener = ( + String type, + EventListener? callback, [ + JSAny? options, + ]) { + capturedEvents.add(type); + }.toJS; + + videoRecordingStoppedListener.callAsFunction(null, Event('stop')); await Future.microtask(() {}); - verify( - () => mediaRecorder.removeEventListener('dataavailable', any()), - ).called(1); + expect( + capturedEvents.where((String e) => e == 'dataavailable').length, + 1, + ); }); testWidgets('stops listening to the media recorder stop events', @@ -1342,28 +1405,41 @@ void main() { await camera.startVideoRecording(); - videoRecordingStoppedListener(Event('stop')); + final List capturedEvents = []; + mockMediaRecorder.removeEventListener = ( + String type, + EventListener? callback, [ + JSAny? 
options, + ]) { + capturedEvents.add(type); + }.toJS; + + videoRecordingStoppedListener.callAsFunction(null, Event('stop')); await Future.microtask(() {}); - verify( - () => mediaRecorder.removeEventListener('stop', any()), - ).called(1); + expect( + capturedEvents.where((String e) => e == 'stop').length, + 1, + ); }); testWidgets('stops listening to the media recorder errors', (WidgetTester tester) async { final StreamController onErrorStreamController = StreamController(); + final MockEventStreamProvider provider = + MockEventStreamProvider(); final Camera camera = Camera( textureId: 1, cameraService: cameraService, ) ..mediaRecorder = mediaRecorder - ..isVideoTypeSupported = isVideoTypeSupported; + ..isVideoTypeSupported = isVideoTypeSupported + ..mediaRecorderOnErrorProvider = provider; - when(() => mediaRecorder.onError) + when(() => provider.forTarget(mediaRecorder)) .thenAnswer((_) => onErrorStreamController.stream); await camera.initialize(); @@ -1371,7 +1447,7 @@ void main() { await camera.startVideoRecording(); - videoRecordingStoppedListener(Event('stop')); + videoRecordingStoppedListener.callAsFunction(null, Event('stop')); await Future.microtask(() {}); @@ -1452,9 +1528,9 @@ void main() { 'when a video recording is created', (WidgetTester tester) async { const String supportedVideoType = 'video/webm'; - final MockMediaRecorder mediaRecorder = MockMediaRecorder(); - when(() => mediaRecorder.onError) - .thenAnswer((_) => const Stream.empty()); + final MockMediaRecorder mockMediaRecorder = MockMediaRecorder(); + final MediaRecorder mediaRecorder = + createJSInteropWrapper(mockMediaRecorder) as MediaRecorder; final Camera camera = Camera( textureId: 1, @@ -1466,22 +1542,20 @@ void main() { await camera.initialize(); await camera.play(); - late void Function(Event) videoDataAvailableListener; - late void Function(Event) videoRecordingStoppedListener; - - when( - () => mediaRecorder.addEventListener('dataavailable', any()), - ).thenAnswer((Invocation invocation) { - videoDataAvailableListener = - invocation.positionalArguments[1] as void Function(Event); - }); - - when( - () => mediaRecorder.addEventListener('stop', any()), - ).thenAnswer((Invocation invocation) { - videoRecordingStoppedListener = - invocation.positionalArguments[1] as void Function(Event); - }); + late EventListener videoDataAvailableListener; + late EventListener videoRecordingStoppedListener; + + mockMediaRecorder.addEventListener = ( + String type, + EventListener? callback, [ + JSAny? options, + ]) { + if (type == 'dataavailable') { + videoDataAvailableListener = callback!; + } else if (type == 'stop') { + videoRecordingStoppedListener = callback!; + } + }.toJS; final StreamQueue streamQueue = StreamQueue(camera.onVideoRecordedEvent); @@ -1490,12 +1564,15 @@ void main() { Blob? 
finalVideo; camera.blobBuilder = (List blobs, String videoType) { - finalVideo = Blob(blobs, videoType); + finalVideo = Blob(blobs.toJS, BlobPropertyBag(type: videoType)); return finalVideo!; }; - videoDataAvailableListener(FakeBlobEvent(Blob([]))); - videoRecordingStoppedListener(Event('stop')); + videoDataAvailableListener.callAsFunction( + null, + createJSInteropWrapper(FakeBlobEvent(Blob([].toJS))), + ); + videoRecordingStoppedListener.callAsFunction(null, Event('stop')); expect( await streamQueue.next, @@ -1543,7 +1620,7 @@ void main() { await camera.initialize(); final List videoTracks = - camera.stream!.getVideoTracks(); + camera.stream!.getVideoTracks().toDart; final MediaStreamTrack defaultVideoTrack = videoTracks.first; defaultVideoTrack.dispatchEvent(Event('ended')); @@ -1570,7 +1647,7 @@ void main() { await camera.initialize(); final List videoTracks = - camera.stream!.getVideoTracks(); + camera.stream!.getVideoTracks().toDart; final MediaStreamTrack defaultVideoTrack = videoTracks.first; camera.stop(); @@ -1589,16 +1666,22 @@ void main() { 'emits an ErrorEvent ' 'when the media recorder fails ' 'when recording a video', (WidgetTester tester) async { - final MockMediaRecorder mediaRecorder = MockMediaRecorder(); + final MockMediaRecorder mockMediaRecorder = MockMediaRecorder(); + final MediaRecorder mediaRecorder = + createJSInteropWrapper(mockMediaRecorder) as MediaRecorder; final StreamController errorController = StreamController(); + final MockEventStreamProvider provider = + MockEventStreamProvider(); final Camera camera = Camera( textureId: textureId, cameraService: cameraService, - )..mediaRecorder = mediaRecorder; + ) + ..mediaRecorder = mediaRecorder + ..mediaRecorderOnErrorProvider = provider; - when(() => mediaRecorder.onError) + when(() => provider.forTarget(mediaRecorder)) .thenAnswer((_) => errorController.stream); final StreamQueue streamQueue = diff --git a/packages/camera/camera_web/example/integration_test/camera_web_test.dart b/packages/camera/camera_web/example/integration_test/camera_web_test.dart index d4c7d582533..ac31979eb17 100644 --- a/packages/camera/camera_web/example/integration_test/camera_web_test.dart +++ b/packages/camera/camera_web/example/integration_test/camera_web_test.dart @@ -2,8 +2,11 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
+// ignore_for_file: only_throw_errors + import 'dart:async'; -import 'dart:html'; +import 'dart:js_interop'; +import 'dart:math'; import 'package:async/async.dart'; import 'package:camera_platform_interface/camera_platform_interface.dart'; @@ -17,6 +20,7 @@ import 'package:flutter/widgets.dart' as widgets; import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; import 'package:mocktail/mocktail.dart'; +import 'package:web/web.dart' hide MediaDeviceKind, OrientationType; import 'helpers/helpers.dart'; @@ -26,41 +30,67 @@ void main() { group('CameraPlugin', () { const int cameraId = 1; + late MockWindow mockWindow; + late MockNavigator mockNavigator; + late MockMediaDevices mockMediaDevices; + late Window window; late Navigator navigator; late MediaDevices mediaDevices; - late VideoElement videoElement; + + late HTMLVideoElement videoElement; + + late MockScreen mockScreen; + late MockScreenOrientation mockScreenOrientation; + late Screen screen; late ScreenOrientation screenOrientation; + + late MockDocument mockDocument; + late MockElement mockDocumentElement; + late Document document; late Element documentElement; late CameraService cameraService; setUp(() async { - window = MockWindow(); - navigator = MockNavigator(); - mediaDevices = MockMediaDevices(); + mockWindow = MockWindow(); + mockNavigator = MockNavigator(); + mockMediaDevices = MockMediaDevices(); + + window = createJSInteropWrapper(mockWindow) as Window; + navigator = createJSInteropWrapper(mockNavigator) as Navigator; + mediaDevices = createJSInteropWrapper(mockMediaDevices) as MediaDevices; + + mockWindow.navigator = navigator; + mockNavigator.mediaDevices = mediaDevices; videoElement = getVideoElementWithBlankStream(const Size(10, 10)); - when(() => window.navigator).thenReturn(navigator); - when(() => navigator.mediaDevices).thenReturn(mediaDevices); + mockScreen = MockScreen(); + mockScreenOrientation = MockScreenOrientation(); - screen = MockScreen(); - screenOrientation = MockScreenOrientation(); + screen = createJSInteropWrapper(mockScreen) as Screen; + screenOrientation = + createJSInteropWrapper(mockScreenOrientation) as ScreenOrientation; - when(() => screen.orientation).thenReturn(screenOrientation); - when(() => window.screen).thenReturn(screen); + mockScreen.orientation = screenOrientation; + mockWindow.screen = screen; - document = MockDocument(); - documentElement = MockElement(); + mockDocument = MockDocument(); + mockDocumentElement = MockElement(); - when(() => document.documentElement).thenReturn(documentElement); - when(() => window.document).thenReturn(document); + document = createJSInteropWrapper(mockDocument) as Document; + documentElement = createJSInteropWrapper(mockDocumentElement) as Element; + + mockDocument.documentElement = documentElement; + mockWindow.document = document; cameraService = MockCameraService(); + registerFallbackValue(createJSInteropWrapper(MockWindow())); + when( () => cameraService.getMediaStreamForOptions( any(), @@ -94,9 +124,11 @@ void main() { ), ).thenReturn(null); - when(mediaDevices.enumerateDevices).thenAnswer( - (_) async => [], - ); + mockMediaDevices.enumerateDevices = () { + return Future>.value( + [].toJS, + ).toJS; + }.toJS; }); testWidgets('requests video permissions', (WidgetTester tester) async { @@ -111,18 +143,22 @@ void main() { testWidgets( 'releases the camera stream ' 'used to request video permissions', (WidgetTester tester) async { - final MockMediaStreamTrack videoTrack = MockMediaStreamTrack(); + 
final MockMediaStreamTrack mockVideoTrack = MockMediaStreamTrack(); + final MediaStreamTrack videoTrack = + createJSInteropWrapper(mockVideoTrack) as MediaStreamTrack; bool videoTrackStopped = false; - when(videoTrack.stop).thenAnswer((Invocation _) { + mockVideoTrack.stop = () { videoTrackStopped = true; - }); + }.toJS; when( () => cameraService.getMediaStreamForOptions(const CameraOptions()), ).thenAnswer( (_) => Future.value( - FakeMediaStream([videoTrack]), + createJSInteropWrapper( + FakeMediaStream([videoTrack]), + ) as MediaStream, ), ); @@ -135,15 +171,19 @@ void main() { testWidgets( 'gets a video stream ' 'for a video input device', (WidgetTester tester) async { - final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo( - '1', - 'Camera 1', - MediaDeviceKind.videoInput, - ); + final MediaDeviceInfo videoDevice = createJSInteropWrapper( + FakeMediaDeviceInfo( + '1', + 'Camera 1', + MediaDeviceKind.videoInput, + ), + ) as MediaDeviceInfo; - when(mediaDevices.enumerateDevices).thenAnswer( - (_) => Future>.value([videoDevice]), - ); + mockMediaDevices.enumerateDevices = () { + return Future>.value( + [videoDevice].toJS) + .toJS; + }.toJS; final List _ = await CameraPlatform.instance.availableCameras(); @@ -163,15 +203,19 @@ void main() { 'does not get a video stream ' 'for the video input device ' 'with an empty device id', (WidgetTester tester) async { - final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo( - '', - 'Camera 1', - MediaDeviceKind.videoInput, - ); + final MediaDeviceInfo videoDevice = createJSInteropWrapper( + FakeMediaDeviceInfo( + '', + 'Camera 1', + MediaDeviceKind.videoInput, + ), + ) as MediaDeviceInfo; - when(mediaDevices.enumerateDevices).thenAnswer( - (_) => Future>.value([videoDevice]), - ); + mockMediaDevices.enumerateDevices = () { + return Future>.value( + [videoDevice].toJS) + .toJS; + }.toJS; final List _ = await CameraPlatform.instance.availableCameras(); @@ -191,14 +235,24 @@ void main() { 'gets the facing mode ' 'from the first available video track ' 'of the video input device', (WidgetTester tester) async { - final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo( - '1', - 'Camera 1', - MediaDeviceKind.videoInput, - ); - - final FakeMediaStream videoStream = FakeMediaStream( - [MockMediaStreamTrack(), MockMediaStreamTrack()]); + final MediaDeviceInfo videoDevice = createJSInteropWrapper( + FakeMediaDeviceInfo( + '1', + 'Camera 1', + MediaDeviceKind.videoInput, + ), + ) as MediaDeviceInfo; + + final MediaStream videoStream = createJSInteropWrapper( + FakeMediaStream( + [ + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + ], + ), + ) as MediaStream; when( () => cameraService.getMediaStreamForOptions( @@ -208,16 +262,18 @@ void main() { ), ).thenAnswer((Invocation _) => Future.value(videoStream)); - when(mediaDevices.enumerateDevices).thenAnswer( - (_) => Future>.value([videoDevice]), - ); + mockMediaDevices.enumerateDevices = () { + return Future>.value( + [videoDevice].toJS) + .toJS; + }.toJS; final List _ = await CameraPlatform.instance.availableCameras(); verify( () => cameraService.getFacingModeForVideoTrack( - videoStream.getVideoTracks().first, + videoStream.getVideoTracks().toDart.first, ), ).called(1); }); @@ -226,44 +282,68 @@ void main() { 'returns appropriate camera descriptions ' 'for multiple video devices ' 'based on video streams', (WidgetTester tester) async { - final FakeMediaDeviceInfo firstVideoDevice = FakeMediaDeviceInfo( 
- '1', - 'Camera 1', - MediaDeviceKind.videoInput, - ); + final MediaDeviceInfo firstVideoDevice = createJSInteropWrapper( + FakeMediaDeviceInfo( + '1', + 'Camera 1', + MediaDeviceKind.videoInput, + ), + ) as MediaDeviceInfo; - final FakeMediaDeviceInfo secondVideoDevice = FakeMediaDeviceInfo( - '4', - 'Camera 4', - MediaDeviceKind.videoInput, - ); + final MediaDeviceInfo secondVideoDevice = createJSInteropWrapper( + FakeMediaDeviceInfo( + '4', + 'Camera 4', + MediaDeviceKind.videoInput, + ), + ) as MediaDeviceInfo; // Create a video stream for the first video device. - final FakeMediaStream firstVideoStream = FakeMediaStream( - [MockMediaStreamTrack(), MockMediaStreamTrack()]); + final MediaStream firstVideoStream = createJSInteropWrapper( + FakeMediaStream( + [ + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + ], + ), + ) as MediaStream; // Create a video stream for the second video device. - final FakeMediaStream secondVideoStream = - FakeMediaStream([MockMediaStreamTrack()]); + final MediaStream secondVideoStream = createJSInteropWrapper( + FakeMediaStream( + [ + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + ], + ), + ) as MediaStream; // Mock media devices to return two video input devices // and two audio devices. - when(mediaDevices.enumerateDevices).thenAnswer( - (_) => Future>.value([ - firstVideoDevice, - FakeMediaDeviceInfo( - '2', - 'Audio Input 2', - MediaDeviceKind.audioInput, - ), - FakeMediaDeviceInfo( - '3', - 'Audio Output 3', - MediaDeviceKind.audioOutput, - ), - secondVideoDevice, - ]), - ); + mockMediaDevices.enumerateDevices = () { + return Future>.value( + [ + firstVideoDevice, + createJSInteropWrapper( + FakeMediaDeviceInfo( + '2', + 'Audio Input 2', + MediaDeviceKind.audioInput, + ), + ) as MediaDeviceInfo, + createJSInteropWrapper( + FakeMediaDeviceInfo( + '3', + 'Audio Output 3', + MediaDeviceKind.audioOutput, + ), + ) as MediaDeviceInfo, + secondVideoDevice, + ].toJS, + ).toJS; + }.toJS; // Mock camera service to return the first video stream // for the first video device. @@ -291,7 +371,7 @@ void main() { // for the first video stream. when( () => cameraService.getFacingModeForVideoTrack( - firstVideoStream.getVideoTracks().first, + firstVideoStream.getVideoTracks().toDart.first, ), ).thenReturn('user'); @@ -302,7 +382,7 @@ void main() { // for the second video stream. 
when( () => cameraService.getFacingModeForVideoTrack( - secondVideoStream.getVideoTracks().first, + secondVideoStream.getVideoTracks().toDart.first, ), ).thenReturn('environment'); @@ -317,12 +397,12 @@ void main() { cameras, equals([ CameraDescription( - name: firstVideoDevice.label!, + name: firstVideoDevice.label, lensDirection: CameraLensDirection.front, sensorOrientation: 0, ), CameraDescription( - name: secondVideoDevice.label!, + name: secondVideoDevice.label, lensDirection: CameraLensDirection.back, sensorOrientation: 0, ) @@ -333,18 +413,30 @@ void main() { testWidgets( 'sets camera metadata ' 'for the camera description', (WidgetTester tester) async { - final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo( - '1', - 'Camera 1', - MediaDeviceKind.videoInput, - ); - - final FakeMediaStream videoStream = FakeMediaStream( - [MockMediaStreamTrack(), MockMediaStreamTrack()]); + final MediaDeviceInfo videoDevice = createJSInteropWrapper( + FakeMediaDeviceInfo( + '1', + 'Camera 1', + MediaDeviceKind.videoInput, + ), + ) as MediaDeviceInfo; + + final MediaStream videoStream = createJSInteropWrapper( + FakeMediaStream( + [ + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + createJSInteropWrapper(MockMediaStreamTrack()) + as MediaStreamTrack, + ], + ), + ) as MediaStream; - when(mediaDevices.enumerateDevices).thenAnswer( - (_) => Future>.value([videoDevice]), - ); + mockMediaDevices.enumerateDevices = () { + return Future>.value( + [videoDevice].toJS) + .toJS; + }.toJS; when( () => cameraService.getMediaStreamForOptions( @@ -356,7 +448,7 @@ void main() { when( () => cameraService.getFacingModeForVideoTrack( - videoStream.getVideoTracks().first, + videoStream.getVideoTracks().toDart.first, ), ).thenReturn('left'); @@ -370,7 +462,7 @@ void main() { (CameraPlatform.instance as CameraPlugin).camerasMetadata, equals({ camera: CameraMetadata( - deviceId: videoDevice.deviceId!, + deviceId: videoDevice.deviceId, facingMode: 'left', ) }), @@ -380,18 +472,32 @@ void main() { testWidgets( 'releases the video stream ' 'of a video input device', (WidgetTester tester) async { - final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo( - '1', - 'Camera 1', - MediaDeviceKind.videoInput, - ); + final MediaDeviceInfo videoDevice = createJSInteropWrapper( + FakeMediaDeviceInfo( + '1', + 'Camera 1', + MediaDeviceKind.videoInput, + ), + ) as MediaDeviceInfo; + + final List tracks = []; + final List stops = List.generate(2, (_) => false); + for (int i = 0; i < stops.length; i++) { + final MockMediaStreamTrack track = MockMediaStreamTrack(); + track.stop = () { + stops[i] = true; + }.toJS; + tracks.add(createJSInteropWrapper(track) as MediaStreamTrack); + } - final FakeMediaStream videoStream = FakeMediaStream( - [MockMediaStreamTrack(), MockMediaStreamTrack()]); + final MediaStream videoStream = + createJSInteropWrapper(FakeMediaStream(tracks)) as MediaStream; - when(mediaDevices.enumerateDevices).thenAnswer( - (_) => Future>.value([videoDevice]), - ); + mockMediaDevices.enumerateDevices = () { + return Future>.value( + [videoDevice].toJS) + .toJS; + }.toJS; when( () => cameraService.getMediaStreamForOptions( @@ -404,36 +510,21 @@ void main() { final List _ = await CameraPlatform.instance.availableCameras(); - for (final MediaStreamTrack videoTrack - in videoStream.getVideoTracks()) { - verify(videoTrack.stop).called(1); - } + expect(stops.every((bool e) => e), isTrue); }); group('throws CameraException', () { - testWidgets( - 'with notSupported error ' - 'when there are no media 
devices', (WidgetTester tester) async { - when(() => navigator.mediaDevices).thenReturn(null); - - expect( - () => CameraPlatform.instance.availableCameras(), - throwsA( - isA().having( - (CameraException e) => e.code, - 'code', - CameraErrorCode.notSupported.toString(), - ), - ), - ); - }); - testWidgets('when MediaDevices.enumerateDevices throws DomException', (WidgetTester tester) async { - final FakeDomException exception = - FakeDomException(DomException.UNKNOWN); + final DOMException exception = DOMException('UnknownError'); - when(mediaDevices.enumerateDevices).thenThrow(exception); + mockMediaDevices.enumerateDevices = () { + throw exception; + // ignore: dead_code + return Future>.value( + [].toJS, + ).toJS; + }.toJS; expect( () => CameraPlatform.instance.availableCameras(), @@ -700,14 +791,17 @@ void main() { group('initializeCamera', () { late Camera camera; - late VideoElement videoElement; + late MockVideoElement mockVideoElement; + late HTMLVideoElement videoElement; late StreamController errorStreamController, abortStreamController; late StreamController endedStreamController; setUp(() { camera = MockCamera(); - videoElement = MockVideoElement(); + mockVideoElement = MockVideoElement(); + videoElement = + createJSInteropWrapper(mockVideoElement) as HTMLVideoElement; errorStreamController = StreamController(); abortStreamController = StreamController(); @@ -719,10 +813,23 @@ void main() { when(camera.play).thenAnswer((Invocation _) => Future.value()); when(() => camera.videoElement).thenReturn(videoElement); - when(() => videoElement.onError).thenAnswer((Invocation _) => - FakeElementStream(errorStreamController.stream)); - when(() => videoElement.onAbort).thenAnswer((Invocation _) => - FakeElementStream(abortStreamController.stream)); + + final MockEventStreamProvider errorProvider = + MockEventStreamProvider(); + final MockEventStreamProvider abortProvider = + MockEventStreamProvider(); + + (CameraPlatform.instance as CameraPlugin).videoElementOnErrorProvider = + errorProvider; + (CameraPlatform.instance as CameraPlugin).videoElementOnAbortProvider = + abortProvider; + + when(() => errorProvider.forElement(videoElement)).thenAnswer( + (_) => FakeElementStream(errorStreamController.stream), + ); + when(() => abortProvider.forElement(videoElement)).thenAnswer( + (_) => FakeElementStream(abortStreamController.stream), + ); when(() => camera.onEnded) .thenAnswer((Invocation _) => endedStreamController.stream); @@ -808,8 +915,7 @@ void main() { testWidgets('when camera throws DomException', (WidgetTester tester) async { - final FakeDomException exception = - FakeDomException(DomException.NOT_ALLOWED); + final DOMException exception = DOMException('NotAllowedError'); when(camera.initialize) .thenAnswer((Invocation _) => Future.value()); @@ -834,6 +940,7 @@ void main() { group('lockCaptureOrientation', () { setUp(() { + registerFallbackValue(DeviceOrientation.portraitUp); when( () => cameraService.mapDeviceOrientationToOrientationType(any()), ).thenReturn(OrientationType.portraitPrimary); @@ -842,12 +949,18 @@ void main() { testWidgets( 'requests full-screen mode ' 'on documentElement', (WidgetTester tester) async { + int fullscreenCalls = 0; + mockDocumentElement.requestFullscreen = ([FullscreenOptions? 
options]) { + fullscreenCalls++; + return Future.value().toJS; + }.toJS; + await CameraPlatform.instance.lockCaptureOrientation( cameraId, DeviceOrientation.portraitUp, ); - verify(documentElement.requestFullscreen).called(1); + expect(fullscreenCalls, 1); }); testWidgets( @@ -859,6 +972,12 @@ void main() { ), ).thenReturn(OrientationType.landscapeSecondary); + final List capturedTypes = []; + mockScreenOrientation.lock = (OrientationLockType orientation) { + capturedTypes.add(orientation); + return Future.value().toJS; + }.toJS; + await CameraPlatform.instance.lockCaptureOrientation( cameraId, DeviceOrientation.landscapeRight, @@ -870,60 +989,16 @@ void main() { ), ).called(1); - verify( - () => screenOrientation.lock( - OrientationType.landscapeSecondary, - ), - ).called(1); + expect(capturedTypes.length, 1); + expect(capturedTypes[0], OrientationType.landscapeSecondary); }); group('throws PlatformException', () { - testWidgets( - 'with orientationNotSupported error ' - 'when screen is not supported', (WidgetTester tester) async { - when(() => window.screen).thenReturn(null); - - expect( - () => CameraPlatform.instance.lockCaptureOrientation( - cameraId, - DeviceOrientation.portraitUp, - ), - throwsA( - isA().having( - (PlatformException e) => e.code, - 'code', - CameraErrorCode.orientationNotSupported.toString(), - ), - ), - ); - }); - - testWidgets( - 'with orientationNotSupported error ' - 'when screen orientation is not supported', - (WidgetTester tester) async { - when(() => screen.orientation).thenReturn(null); - - expect( - () => CameraPlatform.instance.lockCaptureOrientation( - cameraId, - DeviceOrientation.portraitUp, - ), - throwsA( - isA().having( - (PlatformException e) => e.code, - 'code', - CameraErrorCode.orientationNotSupported.toString(), - ), - ), - ); - }); - testWidgets( 'with orientationNotSupported error ' 'when documentElement is not available', (WidgetTester tester) async { - when(() => document.documentElement).thenReturn(null); + mockDocument.documentElement = null; expect( () => CameraPlatform.instance.lockCaptureOrientation( @@ -938,14 +1013,19 @@ void main() { ), ), ); + + mockDocument.documentElement = documentElement; }); testWidgets('when lock throws DomException', (WidgetTester tester) async { - final FakeDomException exception = - FakeDomException(DomException.NOT_ALLOWED); + final DOMException exception = DOMException('NotAllowedError'); - when(() => screenOrientation.lock(any())).thenThrow(exception); + mockScreenOrientation.lock = (OrientationLockType orientation) { + throw exception; + // ignore: dead_code + return Future.value().toJS; + }.toJS; expect( () => CameraPlatform.instance.lockCaptureOrientation( @@ -973,58 +1053,24 @@ void main() { testWidgets('unlocks the capture orientation', (WidgetTester tester) async { + int unlocks = 0; + mockScreenOrientation.unlock = () { + unlocks++; + }.toJS; + await CameraPlatform.instance.unlockCaptureOrientation( cameraId, ); - verify(screenOrientation.unlock).called(1); + expect(unlocks, 1); }); group('throws PlatformException', () { - testWidgets( - 'with orientationNotSupported error ' - 'when screen is not supported', (WidgetTester tester) async { - when(() => window.screen).thenReturn(null); - - expect( - () => CameraPlatform.instance.unlockCaptureOrientation( - cameraId, - ), - throwsA( - isA().having( - (PlatformException e) => e.code, - 'code', - CameraErrorCode.orientationNotSupported.toString(), - ), - ), - ); - }); - - testWidgets( - 'with orientationNotSupported error ' - 'when screen orientation 
is not supported', - (WidgetTester tester) async { - when(() => screen.orientation).thenReturn(null); - - expect( - () => CameraPlatform.instance.unlockCaptureOrientation( - cameraId, - ), - throwsA( - isA().having( - (PlatformException e) => e.code, - 'code', - CameraErrorCode.orientationNotSupported.toString(), - ), - ), - ); - }); - testWidgets( 'with orientationNotSupported error ' 'when documentElement is not available', (WidgetTester tester) async { - when(() => document.documentElement).thenReturn(null); + mockDocument.documentElement = null; expect( () => CameraPlatform.instance.unlockCaptureOrientation( @@ -1038,14 +1084,19 @@ void main() { ), ), ); + + mockDocument.documentElement = documentElement; }); testWidgets('when unlock throws DomException', (WidgetTester tester) async { - final FakeDomException exception = - FakeDomException(DomException.NOT_ALLOWED); + final DOMException exception = DOMException('NotAllowedError'); - when(screenOrientation.unlock).thenThrow(exception); + mockScreenOrientation.unlock = () { + throw exception; + // ignore: dead_code + return Future.value().toJS; + }.toJS; expect( () => CameraPlatform.instance.unlockCaptureOrientation( @@ -1066,10 +1117,10 @@ void main() { group('takePicture', () { testWidgets('captures a picture', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final MockXFile capturedPicture = MockXFile(); + final XFile capturedPicture = XFile('/bogus/test'); when(camera.takePicture) - .thenAnswer((Invocation _) => Future.value(capturedPicture)); + .thenAnswer((Invocation _) async => capturedPicture); // Save the camera in the camera plugin. (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera; @@ -1101,8 +1152,7 @@ void main() { testWidgets('when takePicture throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.NOT_SUPPORTED); + final DOMException exception = DOMException('NotSupportedError'); when(camera.takePicture).thenThrow(exception); @@ -1207,8 +1257,7 @@ void main() { testWidgets('when startVideoRecording throws DomException', (WidgetTester tester) async { - final FakeDomException exception = - FakeDomException(DomException.INVALID_STATE); + final DOMException exception = DOMException('InvalidStateError'); when(camera.startVideoRecording).thenThrow(exception); @@ -1284,10 +1333,10 @@ void main() { group('stopVideoRecording', () { testWidgets('stops a video recording', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final MockXFile capturedVideo = MockXFile(); + final XFile capturedVideo = XFile('/bogus/test'); when(camera.stopVideoRecording) - .thenAnswer((Invocation _) => Future.value(capturedVideo)); + .thenAnswer((Invocation _) async => capturedVideo); // Save the camera in the camera plugin. 
(CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera; @@ -1305,11 +1354,12 @@ void main() { final MockCamera camera = MockCamera(); final StreamController videoRecordingErrorController = StreamController(); + final XFile capturedVideo = XFile('/bogus/test'); when(camera.startVideoRecording).thenAnswer((Invocation _) async {}); when(camera.stopVideoRecording) - .thenAnswer((Invocation _) => Future.value(MockXFile())); + .thenAnswer((Invocation _) async => capturedVideo); when(() => camera.onVideoRecordingError) .thenAnswer((Invocation _) => videoRecordingErrorController.stream); @@ -1346,8 +1396,7 @@ void main() { testWidgets('when stopVideoRecording throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.INVALID_STATE); + final DOMException exception = DOMException('InvalidStateError'); when(camera.stopVideoRecording).thenThrow(exception); @@ -1427,8 +1476,7 @@ void main() { testWidgets('when pauseVideoRecording throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.INVALID_STATE); + final DOMException exception = DOMException('InvalidStateError'); when(camera.pauseVideoRecording).thenThrow(exception); @@ -1508,8 +1556,7 @@ void main() { testWidgets('when resumeVideoRecording throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.INVALID_STATE); + final DOMException exception = DOMException('InvalidStateError'); when(camera.resumeVideoRecording).thenThrow(exception); @@ -1595,8 +1642,7 @@ void main() { testWidgets('when setFlashMode throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.NOT_SUPPORTED); + final DOMException exception = DOMException('NotSupportedError'); when(() => camera.setFlashMode(any())).thenThrow(exception); @@ -1770,8 +1816,7 @@ void main() { testWidgets('when getMaxZoomLevel throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.NOT_SUPPORTED); + final DOMException exception = DOMException('NotSupportedError'); when(camera.getMaxZoomLevel).thenThrow(exception); @@ -1864,8 +1909,7 @@ void main() { testWidgets('when getMinZoomLevel throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.NOT_SUPPORTED); + final DOMException exception = DOMException('NotSupportedError'); when(camera.getMinZoomLevel).thenThrow(exception); @@ -1953,8 +1997,7 @@ void main() { testWidgets('when setZoomLevel throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.NOT_SUPPORTED); + final DOMException exception = DOMException('NotSupportedError'); when(() => camera.setZoomLevel(any())).thenThrow(exception); @@ -2066,8 +2109,7 @@ void main() { testWidgets('when pause throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.NOT_SUPPORTED); + final DOMException exception = DOMException('NotSupportedError'); when(camera.pause).thenThrow(exception); @@ 
-2121,8 +2163,7 @@ void main() { testWidgets('when play throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.NOT_SUPPORTED); + final DOMException exception = DOMException('NotSupportedError'); when(camera.play).thenThrow(exception); @@ -2192,7 +2233,8 @@ void main() { group('dispose', () { late Camera camera; - late VideoElement videoElement; + late MockVideoElement mockVideoElement; + late HTMLVideoElement videoElement; late StreamController errorStreamController, abortStreamController; late StreamController endedStreamController; @@ -2200,7 +2242,9 @@ void main() { setUp(() { camera = MockCamera(); - videoElement = MockVideoElement(); + mockVideoElement = MockVideoElement(); + videoElement = + createJSInteropWrapper(mockVideoElement) as HTMLVideoElement; errorStreamController = StreamController(); abortStreamController = StreamController(); @@ -2214,10 +2258,23 @@ void main() { when(camera.dispose).thenAnswer((Invocation _) => Future.value()); when(() => camera.videoElement).thenReturn(videoElement); - when(() => videoElement.onError).thenAnswer((Invocation _) => - FakeElementStream(errorStreamController.stream)); - when(() => videoElement.onAbort).thenAnswer((Invocation _) => - FakeElementStream(abortStreamController.stream)); + + final MockEventStreamProvider errorProvider = + MockEventStreamProvider(); + final MockEventStreamProvider abortProvider = + MockEventStreamProvider(); + + (CameraPlatform.instance as CameraPlugin).videoElementOnErrorProvider = + errorProvider; + (CameraPlatform.instance as CameraPlugin).videoElementOnAbortProvider = + abortProvider; + + when(() => errorProvider.forElement(videoElement)).thenAnswer( + (_) => FakeElementStream(errorStreamController.stream), + ); + when(() => abortProvider.forElement(videoElement)).thenAnswer( + (_) => FakeElementStream(abortStreamController.stream), + ); when(() => camera.onEnded) .thenAnswer((Invocation _) => endedStreamController.stream); @@ -2316,8 +2373,7 @@ void main() { testWidgets('when dispose throws DomException', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final FakeDomException exception = - FakeDomException(DomException.INVALID_ACCESS); + final DOMException exception = DOMException('InvalidAccessError'); when(camera.dispose).thenThrow(exception); @@ -2373,7 +2429,8 @@ void main() { group('events', () { late Camera camera; - late VideoElement videoElement; + late MockVideoElement mockVideoElement; + late HTMLVideoElement videoElement; late StreamController errorStreamController, abortStreamController; late StreamController endedStreamController; @@ -2381,7 +2438,9 @@ void main() { setUp(() { camera = MockCamera(); - videoElement = MockVideoElement(); + mockVideoElement = MockVideoElement(); + videoElement = + createJSInteropWrapper(mockVideoElement) as HTMLVideoElement; errorStreamController = StreamController(); abortStreamController = StreamController(); @@ -2394,10 +2453,23 @@ void main() { when(camera.play).thenAnswer((Invocation _) => Future.value()); when(() => camera.videoElement).thenReturn(videoElement); - when(() => videoElement.onError).thenAnswer((Invocation _) => - FakeElementStream(errorStreamController.stream)); - when(() => videoElement.onAbort).thenAnswer((Invocation _) => - FakeElementStream(abortStreamController.stream)); + + final MockEventStreamProvider errorProvider = + MockEventStreamProvider(); + final MockEventStreamProvider abortProvider = + 
MockEventStreamProvider(); + + (CameraPlatform.instance as CameraPlugin).videoElementOnErrorProvider = + errorProvider; + (CameraPlatform.instance as CameraPlugin).videoElementOnAbortProvider = + abortProvider; + + when(() => errorProvider.forElement(any())).thenAnswer( + (_) => FakeElementStream(errorStreamController.stream), + ); + when(() => abortProvider.forElement(any())).thenAnswer( + (_) => FakeElementStream(abortStreamController.stream), + ); when(() => camera.onEnded) .thenAnswer((Invocation _) => endedStreamController.stream); @@ -2405,8 +2477,7 @@ void main() { when(() => camera.onVideoRecordingError) .thenAnswer((Invocation _) => videoRecordingErrorController.stream); - when(() => camera.startVideoRecording()) - .thenAnswer((Invocation _) async {}); + when(camera.startVideoRecording).thenAnswer((Invocation _) async {}); }); testWidgets( @@ -2479,7 +2550,9 @@ void main() { await CameraPlatform.instance.initializeCamera(cameraId); - endedStreamController.add(MockMediaStreamTrack()); + endedStreamController.add( + createJSInteropWrapper(MockMediaStreamTrack()) as MediaStreamTrack, + ); expect( await streamQueue.next, @@ -2509,16 +2582,17 @@ void main() { await CameraPlatform.instance.initializeCamera(cameraId); - final FakeMediaError error = FakeMediaError( - MediaError.MEDIA_ERR_NETWORK, - 'A network error occurred.', - ); + final MediaError error = createJSInteropWrapper( + FakeMediaError( + MediaError.MEDIA_ERR_NETWORK, + 'A network error occurred.', + ), + ) as MediaError; final CameraErrorCode errorCode = CameraErrorCode.fromMediaError(error); - when(() => videoElement.error).thenReturn(error); - + mockVideoElement.error = error; errorStreamController.add(Event('error')); expect( @@ -2546,13 +2620,13 @@ void main() { await CameraPlatform.instance.initializeCamera(cameraId); - final FakeMediaError error = - FakeMediaError(MediaError.MEDIA_ERR_NETWORK); + final MediaError error = createJSInteropWrapper( + FakeMediaError(MediaError.MEDIA_ERR_NETWORK), + ) as MediaError; final CameraErrorCode errorCode = CameraErrorCode.fromMediaError(error); - when(() => videoElement.error).thenReturn(error); - + mockVideoElement.error = error; errorStreamController.add(Event('error')); expect( @@ -2881,7 +2955,9 @@ void main() { await CameraPlatform.instance.initializeCamera(cameraId); await CameraPlatform.instance.startVideoRecording(cameraId); - final FakeErrorEvent errorEvent = FakeErrorEvent('type', 'message'); + final ErrorEvent errorEvent = + createJSInteropWrapper(FakeErrorEvent('type', 'message')) + as ErrorEvent; videoRecordingErrorController.add(errorEvent); @@ -3013,7 +3089,7 @@ void main() { testWidgets('onVideoRecordedEvent emits a VideoRecordedEvent', (WidgetTester tester) async { final MockCamera camera = MockCamera(); - final MockXFile capturedVideo = MockXFile(); + final XFile capturedVideo = XFile('/bogus/test'); final Stream stream = Stream.value( VideoRecordedEvent(cameraId, capturedVideo, Duration.zero)); @@ -3036,24 +3112,16 @@ void main() { }); group('onDeviceOrientationChanged', () { - group('emits an empty stream', () { - testWidgets('when screen is not supported', - (WidgetTester tester) async { - when(() => window.screen).thenReturn(null); - - final Stream stream = - CameraPlatform.instance.onDeviceOrientationChanged(); - expect(await stream.isEmpty, isTrue); - }); + final StreamController eventStreamController = + StreamController(); - testWidgets('when screen orientation is not supported', - (WidgetTester tester) async { - when(() => 
screen.orientation).thenReturn(null); - - final Stream stream = - CameraPlatform.instance.onDeviceOrientationChanged(); - expect(await stream.isEmpty, isTrue); - }); + setUp(() { + final MockEventStreamProvider provider = + MockEventStreamProvider(); + (CameraPlatform.instance as CameraPlugin) + .orientationOnChangeProvider = provider; + when(() => provider.forTarget(any())) + .thenAnswer((_) => eventStreamController.stream); }); testWidgets('emits the initial DeviceOrientationChangedEvent', @@ -3065,14 +3133,7 @@ void main() { ).thenReturn(DeviceOrientation.portraitUp); // Set the initial screen orientation to portraitPrimary. - when(() => screenOrientation.type) - .thenReturn(OrientationType.portraitPrimary); - - final StreamController eventStreamController = - StreamController(); - - when(() => screenOrientation.onChange) - .thenAnswer((Invocation _) => eventStreamController.stream); + mockScreenOrientation.type = OrientationType.portraitPrimary; final Stream eventStream = CameraPlatform.instance.onDeviceOrientationChanged(); @@ -3108,12 +3169,6 @@ void main() { ), ).thenReturn(DeviceOrientation.portraitDown); - final StreamController eventStreamController = - StreamController(); - - when(() => screenOrientation.onChange) - .thenAnswer((Invocation _) => eventStreamController.stream); - final Stream eventStream = CameraPlatform.instance.onDeviceOrientationChanged(); @@ -3122,8 +3177,7 @@ void main() { // Change the screen orientation to landscapePrimary and // emit an event on the screenOrientation.onChange stream. - when(() => screenOrientation.type) - .thenReturn(OrientationType.landscapePrimary); + mockScreenOrientation.type = OrientationType.landscapePrimary; eventStreamController.add(Event('change')); @@ -3138,8 +3192,7 @@ void main() { // Change the screen orientation to portraitSecondary and // emit an event on the screenOrientation.onChange stream. - when(() => screenOrientation.type) - .thenReturn(OrientationType.portraitSecondary); + mockScreenOrientation.type = OrientationType.portraitSecondary; eventStreamController.add(Event('change')); diff --git a/packages/camera/camera_web/example/integration_test/helpers/mocks.dart b/packages/camera/camera_web/example/integration_test/helpers/mocks.dart index d1fbdd574ba..e4852843d1f 100644 --- a/packages/camera/camera_web/example/integration_test/helpers/mocks.dart +++ b/packages/camera/camera_web/example/integration_test/helpers/mocks.dart @@ -5,7 +5,7 @@ // ignore_for_file: avoid_implementing_value_types import 'dart:async'; -import 'dart:html'; +import 'dart:js_interop'; import 'dart:ui'; // ignore_for_file: implementation_imports @@ -13,103 +13,155 @@ import 'package:camera_web/src/camera.dart'; import 'package:camera_web/src/camera_service.dart'; import 'package:camera_web/src/shims/dart_js_util.dart'; import 'package:camera_web/src/types/types.dart'; -import 'package:cross_file/cross_file.dart'; import 'package:mocktail/mocktail.dart'; +// TODO(srujzs): This is exported in `package:web` 0.6.0. Remove this when it is available. 
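The mock classes below replace `Mock`/`Fake` subclasses of `dart:html` interfaces, which cannot implement `package:web`'s extension types. A standalone sketch of the `@JSExport` + `createJSInteropWrapper` pattern they rely on; the `FakeOrientation` name and members are illustrative, not part of this patch:

```dart
import 'dart:js_interop';

import 'package:web/web.dart' as web;

@JSExport()
class FakeOrientation {
  // Plain fields become readable/writable JS properties on the wrapper.
  String type = 'portrait-primary';

  // Closures converted with `.toJS` become callable JS members.
  JSFunction unlock = () {}.toJS;
}

void main() {
  final FakeOrientation fake = FakeOrientation();

  // createJSInteropWrapper returns a JSObject, so it can be cast to any
  // package:web extension type whose members the exported class provides.
  final web.ScreenOrientation orientation =
      createJSInteropWrapper(fake) as web.ScreenOrientation;

  orientation.unlock(); // Invokes the Dart closure stored in `unlock`.
  fake.type = 'landscape-primary'; // Visible through `orientation.type`.
}
```

Because the wrapper only forwards to whatever the exported class defines, each mock below exposes exactly the properties and `JSFunction` members the code under test touches.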
+import 'package:web/src/helpers/events/streams.dart'; +import 'package:web/web.dart' as web; + +@JSExport() +class MockWindow { + late web.Navigator navigator; + late web.Screen screen; + late web.Document document; +} + +@JSExport() +class MockScreen { + late web.ScreenOrientation orientation; +} -class MockWindow extends Mock implements Window {} +@JSExport() +class MockScreenOrientation { + /// JSPromise Function(web.OrientationLockType orientation) + JSFunction lock = (web.OrientationLockType orientation) { + return Future.value().toJS; + }.toJS; -class MockScreen extends Mock implements Screen {} + /// void Function() + late JSFunction unlock; + late web.OrientationType type; +} + +@JSExport() +class MockDocument { + web.Element? documentElement; +} -class MockScreenOrientation extends Mock implements ScreenOrientation {} +@JSExport() +class MockElement { + /// JSPromise Function([FullscreenOptions options]) + JSFunction requestFullscreen = ([web.FullscreenOptions? options]) { + return Future.value().toJS; + }.toJS; +} -class MockDocument extends Mock implements Document {} +@JSExport() +class MockNavigator { + late web.MediaDevices mediaDevices; +} -class MockElement extends Mock implements Element {} +@JSExport() +class MockMediaDevices { + /// JSPromise Function([web.MediaStreamConstraints? constraints]) + late JSFunction getUserMedia; -class MockNavigator extends Mock implements Navigator {} + /// web.MediaTrackSupportedConstraints Function() + late JSFunction getSupportedConstraints; -class MockMediaDevices extends Mock implements MediaDevices {} + /// JSPromise> Function() + late JSFunction enumerateDevices; +} class MockCameraService extends Mock implements CameraService {} -class MockMediaStreamTrack extends Mock implements MediaStreamTrack {} +@JSExport() +class MockMediaStreamTrack { + /// web.MediaTrackCapabilities Function(); + late JSFunction getCapabilities; -class MockCamera extends Mock implements Camera {} + /// web.MediaTrackSettings Function() + JSFunction getSettings = () { + return web.MediaTrackSettings(); + }.toJS; -class MockCameraOptions extends Mock implements CameraOptions {} + /// JSPromise Function([web.MediaTrackConstraints? constraints]) + late JSFunction applyConstraints; -class MockVideoElement extends Mock implements VideoElement {} + /// void Function() + JSFunction stop = () {}.toJS; +} -class MockXFile extends Mock implements XFile {} +class MockCamera extends Mock implements Camera {} -class MockJsUtil extends Mock implements JsUtil {} +class MockCameraOptions extends Mock implements CameraOptions {} -class MockMediaRecorder extends Mock implements MediaRecorder {} +@JSExport() +class MockVideoElement { + web.MediaProvider? srcObject; + web.MediaError? error; +} -/// A fake [MediaStream] that returns the provided [_videoTracks]. -class FakeMediaStream extends Fake implements MediaStream { - FakeMediaStream(this._videoTracks); +class MockJsUtil extends Mock implements JsUtil {} - final List _videoTracks; +@JSExport() +class MockMediaRecorder { + /// void Function(String type, web.EventListener? callback, [JSAny options]) + JSFunction addEventListener = + (String type, web.EventListener? callback, [JSAny? options]) {}.toJS; - @override - List getVideoTracks() => _videoTracks; -} + /// void Function(String type, web.EventListener? callback, [JSAny options]) + JSFunction removeEventListener = + (String type, web.EventListener? callback, [JSAny? 
options]) {}.toJS; -/// A fake [MediaDeviceInfo] that returns the provided [_deviceId], [_label] and [_kind]. -class FakeMediaDeviceInfo extends Fake implements MediaDeviceInfo { - FakeMediaDeviceInfo(this._deviceId, this._label, this._kind); + /// void Function([int timeslice]) + JSFunction start = ([int? timeslice]) {}.toJS; - final String _deviceId; - final String _label; - final String _kind; + /// void Function() + JSFunction pause = () {}.toJS; - @override - String? get deviceId => _deviceId; + /// void Function() + JSFunction resume = () {}.toJS; - @override - String? get label => _label; + /// void Function() + JSFunction stop = () {}.toJS; - @override - String? get kind => _kind; + web.RecordingState state = 'inactive'; } -/// A fake [MediaError] that returns the provided error [_code] and [_message]. -class FakeMediaError extends Fake implements MediaError { - FakeMediaError( - this._code, [ - String message = '', - ]) : _message = message; - - final int _code; - final String _message; +/// A fake [MediaStream] that returns the provided [_videoTracks]. +@JSExport() +class FakeMediaStream { + FakeMediaStream(this._videoTracks); - @override - int get code => _code; + final List _videoTracks; - @override - String? get message => _message; + List getVideoTracks() => _videoTracks; } -/// A fake [DomException] that returns the provided error [_name] and [_message]. -class FakeDomException extends Fake implements DomException { - FakeDomException( - this._name, [ - String? message, - ]) : _message = message; +/// A fake [MediaDeviceInfo] that returns the provided [_deviceId], [_label] and [_kind]. +@JSExport() +class FakeMediaDeviceInfo { + FakeMediaDeviceInfo(this.deviceId, this.label, this.kind); - final String _name; - final String? _message; + final String deviceId; + final String label; + final String kind; +} - @override - String get name => _name; +/// A fake [MediaError] that returns the provided error [_code] and [_message]. +@JSExport() +class FakeMediaError { + FakeMediaError( + this.code, [ + this.message = '', + ]); - @override - String? get message => _message; + final int code; + final String message; } /// A fake [ElementStream] that listens to the provided [_stream] on [listen]. -class FakeElementStream extends Fake +class FakeElementStream extends Fake implements ElementStream { FakeElementStream(this._stream); @@ -128,31 +180,23 @@ class FakeElementStream extends Fake } /// A fake [BlobEvent] that returns the provided blob [data]. -class FakeBlobEvent extends Fake implements BlobEvent { - FakeBlobEvent(this._blob); - - final Blob? _blob; +@JSExport() +class FakeBlobEvent { + FakeBlobEvent(this.data); - @override - Blob? get data => _blob; + final web.Blob? data; } /// A fake [DomException] that returns the provided error [_name] and [_message]. -class FakeErrorEvent extends Fake implements ErrorEvent { +@JSExport() +class FakeErrorEvent { FakeErrorEvent( - String type, [ - String? message, - ]) : _type = type, - _message = message; + this.type, [ + this.message = '', + ]); - final String _type; - final String? _message; - - @override - String get type => _type; - - @override - String? get message => _message; + final String type; + final String message; } /// Returns a video element with a blank stream of size [videoSize]. 
@@ -162,14 +206,17 @@ class FakeErrorEvent extends Fake implements ErrorEvent { /// final videoElement = getVideoElementWithBlankStream(Size(100, 100)); /// final videoStream = videoElement.captureStream(); /// ``` -VideoElement getVideoElementWithBlankStream(Size videoSize) { - final CanvasElement canvasElement = CanvasElement( - width: videoSize.width.toInt(), - height: videoSize.height.toInt(), - )..context2D.fillRect(0, 0, videoSize.width, videoSize.height); +web.HTMLVideoElement getVideoElementWithBlankStream(Size videoSize) { + final web.HTMLCanvasElement canvasElement = web.HTMLCanvasElement() + ..width = videoSize.width.toInt() + ..height = videoSize.height.toInt() + ..context2D.fillRect(0, 0, videoSize.width, videoSize.height); - final VideoElement videoElement = VideoElement() + final web.HTMLVideoElement videoElement = web.HTMLVideoElement() ..srcObject = canvasElement.captureStream(); return videoElement; } + +class MockEventStreamProvider extends Mock + implements web.EventStreamProvider {} diff --git a/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart b/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart index d93b42690e5..3a2145e39ec 100644 --- a/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart +++ b/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart @@ -3,9 +3,12 @@ // found in the LICENSE file. // ignore: implementation_imports +import 'dart:js_interop'; + import 'package:camera_web/src/types/types.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; +import 'package:web/web.dart'; import 'helpers/helpers.dart'; @@ -16,7 +19,8 @@ void main() { testWidgets('sets all properties', (WidgetTester tester) async { const double minimum = 100.0; const double maximum = 400.0; - final MockMediaStreamTrack videoTrack = MockMediaStreamTrack(); + final MediaStreamTrack videoTrack = + createJSInteropWrapper(MockMediaStreamTrack()) as MediaStreamTrack; final ZoomLevelCapability capability = ZoomLevelCapability( minimum: minimum, @@ -30,7 +34,8 @@ void main() { }); testWidgets('supports value equality', (WidgetTester tester) async { - final MockMediaStreamTrack videoTrack = MockMediaStreamTrack(); + final MediaStreamTrack videoTrack = + createJSInteropWrapper(MockMediaStreamTrack()) as MediaStreamTrack; expect( ZoomLevelCapability( diff --git a/packages/camera/camera_web/example/pubspec.yaml b/packages/camera/camera_web/example/pubspec.yaml index 5cf64e1c5ce..d4f398ec197 100644 --- a/packages/camera/camera_web/example/pubspec.yaml +++ b/packages/camera/camera_web/example/pubspec.yaml @@ -2,8 +2,8 @@ name: camera_web_integration_tests publish_to: none environment: - sdk: ^3.2.0 - flutter: ">=3.16.0" + sdk: ^3.3.0 + flutter: ">=3.19.0" dependencies: camera_platform_interface: ^2.6.0 @@ -16,6 +16,7 @@ dependencies: path: ../ flutter: sdk: flutter + web: ^1.0.0 dev_dependencies: async: ^2.5.0 @@ -25,4 +26,3 @@ dev_dependencies: integration_test: sdk: flutter mocktail: 0.3.0 - diff --git a/packages/camera/camera_web/lib/src/camera.dart b/packages/camera/camera_web/lib/src/camera.dart index 7512fc62771..124f595fecf 100644 --- a/packages/camera/camera_web/lib/src/camera.dart +++ b/packages/camera/camera_web/lib/src/camera.dart @@ -3,14 +3,17 @@ // found in the LICENSE file. 
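The `Camera` rewrite below leans on the standard `dart:js_interop` conversions: JS promises and JS arrays are bridged into Dart with `.toDart`, and Dart lists and closures go the other way with `.toJS`. A self-contained sketch of those conversions, assuming `package:web` ^1.0.0; the `listVideoInputs` helper is illustrative and not part of this patch:

```dart
import 'dart:js_interop';

import 'package:web/web.dart' as web;

Future<void> listVideoInputs() async {
  final web.MediaDevices devices = web.window.navigator.mediaDevices;

  // enumerateDevices() returns a JSPromise<JSArray<MediaDeviceInfo>>:
  // one `.toDart` awaits the promise, the second converts the JSArray.
  final List<web.MediaDeviceInfo> infos =
      (await devices.enumerateDevices().toDart).toDart;

  for (final web.MediaDeviceInfo info in infos) {
    if (info.kind == 'videoinput') {
      // label and deviceId surface as plain Dart Strings.
      print('${info.label} (${info.deviceId})');
    }
  }
}
```

The same `.toDart` chaining appears throughout the file wherever the old code consumed `dart:html`'s already-Dart `List` and `Future` return types.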
import 'dart:async'; -import 'dart:html' as html; +import 'dart:js_interop'; import 'dart:ui'; import 'dart:ui_web' as ui_web; import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/foundation.dart'; +import 'package:web/web.dart' as web; +import 'package:web/web.dart'; import 'camera_service.dart'; +import 'pkg_web_tweaks.dart'; import 'types/types.dart'; String _getViewType(int cameraId) => 'plugins.flutter.io/camera_$cameraId'; @@ -48,10 +51,6 @@ class Camera { this.recorderOptions = const (audioBitrate: null, videoBitrate: null), }) : _cameraService = cameraService; - // A torch mode constraint name. - // See: https://w3c.github.io/mediacapture-image/#dom-mediatracksupportedconstraints-torch - static const String _torchModeKey = 'torch'; - /// The texture id used to register the camera view. final int textureId; @@ -63,16 +62,16 @@ class Camera { /// The video element that displays the camera stream. /// Initialized in [initialize]. - late final html.VideoElement videoElement; + late final web.HTMLVideoElement videoElement; /// The wrapping element for the [videoElement] to avoid overriding /// the custom styles applied in [_applyDefaultVideoStyles]. /// Initialized in [initialize]. - late final html.DivElement divElement; + late final web.HTMLDivElement divElement; /// The camera stream displayed in the [videoElement]. /// Initialized in [initialize] and [play], reset in [stop]. - html.MediaStream? stream; + web.MediaStream? stream; /// The stream of the camera video tracks that have ended playing. /// @@ -82,14 +81,14 @@ class Camera { /// /// MediaStreamTrack.onended: /// https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/onended - Stream get onEnded => onEndedController.stream; + Stream get onEnded => onEndedController.stream; /// The stream controller for the [onEnded] stream. @visibleForTesting - final StreamController onEndedController = - StreamController.broadcast(); + final StreamController onEndedController = + StreamController.broadcast(); - StreamSubscription? _onEndedSubscription; + StreamSubscription? _onEndedSubscription; /// The stream of the camera video recording errors. /// @@ -98,15 +97,20 @@ class Camera { /// /// MediaRecorder.error: /// https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/error_event - Stream get onVideoRecordingError => + Stream get onVideoRecordingError => videoRecordingErrorController.stream; + /// The stream provider for [MediaRecorder] error events. + @visibleForTesting + EventStreamProvider mediaRecorderOnErrorProvider = + EventStreamProviders.errorMediaRecorderEvent; + /// The stream controller for the [onVideoRecordingError] stream. @visibleForTesting - final StreamController videoRecordingErrorController = - StreamController.broadcast(); + final StreamController videoRecordingErrorController = + StreamController.broadcast(); - StreamSubscription? _onVideoRecordingErrorSubscription; + StreamSubscription? _onVideoRecordingErrorSubscription; /// The camera flash mode. @visibleForTesting @@ -117,36 +121,34 @@ class Camera { /// The current browser window used to access media devices. @visibleForTesting - html.Window? window = html.window; + web.Window window = web.window; /// The recorder used to record a video from the camera. @visibleForTesting - html.MediaRecorder? mediaRecorder; + web.MediaRecorder? mediaRecorder; /// Whether the video of the given type is supported. 
@visibleForTesting bool Function(String) isVideoTypeSupported = - html.MediaRecorder.isTypeSupported; + (String type) => web.MediaRecorder.isTypeSupported(type); /// The list of consecutive video data files recorded with [mediaRecorder]. - final List _videoData = []; + final List _videoData = []; /// Completes when the video recording is stopped/finished. Completer? _videoAvailableCompleter; /// A data listener fired when a new part of video data is available. - void Function(html.Event)? _videoDataAvailableListener; + void Function(web.BlobEvent)? _videoDataAvailableListener; /// A listener fired when a video recording is stopped. - void Function(html.Event)? _videoRecordingStoppedListener; + void Function(web.Event)? _videoRecordingStoppedListener; /// A builder to merge a list of blobs into a single blob. @visibleForTesting - // TODO(stuartmorgan): Remove this 'ignore' once we don't analyze using 2.10 - // any more. It's a false positive that is fixed in later versions. - // ignore: prefer_function_declarations_over_variables - html.Blob Function(List blobs, String type) blobBuilder = - (List blobs, String type) => html.Blob(blobs, type); + web.Blob Function(List blobs, String type) blobBuilder = + (List blobs, String type) => + web.Blob(blobs.toJS, web.BlobPropertyBag(type: type)); /// The stream that emits a [VideoRecordedEvent] when a video recording is created. Stream get onVideoRecordedEvent => @@ -166,10 +168,12 @@ class Camera { cameraId: textureId, ); - videoElement = html.VideoElement(); + videoElement = web.HTMLVideoElement(); - divElement = html.DivElement() + divElement = web.HTMLDivElement() ..style.setProperty('object-fit', 'cover') + ..style.setProperty('height', '100%') + ..style.setProperty('width', '100%') ..append(videoElement); ui_web.platformViewRegistry.registerViewFactory( @@ -185,12 +189,14 @@ class Camera { _applyDefaultVideoStyles(videoElement); - final List videoTracks = stream!.getVideoTracks(); + final List videoTracks = + stream!.getVideoTracks().toDart; if (videoTracks.isNotEmpty) { - final html.MediaStreamTrack defaultVideoTrack = videoTracks.first; - - _onEndedSubscription = defaultVideoTrack.onEnded.listen((html.Event _) { + final web.MediaStreamTrack defaultVideoTrack = videoTracks.first; + _onEndedSubscription = EventStreamProviders.endedEvent + .forTarget(defaultVideoTrack) + .listen((web.Event _) { onEndedController.add(defaultVideoTrack); }); } @@ -207,7 +213,7 @@ class Camera { ); videoElement.srcObject = stream; } - await videoElement.play(); + await videoElement.play().toDart; } /// Pauses the camera stream on the current frame. @@ -217,14 +223,15 @@ class Camera { /// Stops the camera stream and resets the camera source. void stop() { - final List videoTracks = stream!.getVideoTracks(); + final List videoTracks = + stream!.getVideoTracks().toDart; if (videoTracks.isNotEmpty) { onEndedController.add(videoTracks.first); } - final List? tracks = stream?.getTracks(); + final List? 
tracks = stream?.getTracks().toDart; if (tracks != null) { - for (final html.MediaStreamTrack track in tracks) { + for (final web.MediaStreamTrack track in tracks) { track.stop(); } } @@ -246,8 +253,9 @@ class Camera { final int videoWidth = videoElement.videoWidth; final int videoHeight = videoElement.videoHeight; - final html.CanvasElement canvas = - html.CanvasElement(width: videoWidth, height: videoHeight); + final web.HTMLCanvasElement canvas = web.HTMLCanvasElement() + ..width = videoWidth + ..height = videoHeight; final bool isBackCamera = getLensDirection() == CameraLensDirection.back; // Flip the picture horizontally if it is not taken from a back camera. @@ -257,16 +265,28 @@ class Camera { ..scale(-1, 1); } - canvas.context2D - .drawImageScaled(videoElement, 0, 0, videoWidth, videoHeight); + canvas.context2D.drawImageScaled( + videoElement, + 0, + 0, + videoWidth.toDouble(), + videoHeight.toDouble(), + ); - final html.Blob blob = await canvas.toBlob('image/jpeg'); + final Completer blobCompleter = Completer(); + canvas.toBlob( + (web.Blob blob) { + blobCompleter.complete(blob); + }.toJS, + 'image/jpeg', + ); + final web.Blob blob = await blobCompleter.future; if (shouldEnableTorchMode) { _setTorchMode(enabled: false); } - return XFile(html.Url.createObjectUrl(blob)); + return XFile(web.URL.createObjectURL(blob)); } /// Returns a size of the camera video based on its first video track size. @@ -274,25 +294,23 @@ class Camera { /// Returns [Size.zero] if the camera is missing a video track or /// the video track does not include the width or height setting. Size getVideoSize() { - final List videoTracks = - videoElement.srcObject?.getVideoTracks() ?? []; + final List videoTracks = + (videoElement.srcObject as web.MediaStream?)?.getVideoTracks().toDart ?? + []; if (videoTracks.isEmpty) { return Size.zero; } - final html.MediaStreamTrack defaultVideoTrack = videoTracks.first; - final Map defaultVideoTrackSettings = + final web.MediaStreamTrack defaultVideoTrack = videoTracks.first; + + final web.MediaTrackSettings defaultVideoTrackSettings = defaultVideoTrack.getSettings(); - final double? width = defaultVideoTrackSettings['width'] as double?; - final double? height = defaultVideoTrackSettings['height'] as double?; + final int width = defaultVideoTrackSettings.width; + final int height = defaultVideoTrackSettings.height; - if (width != null && height != null) { - return Size(width, height); - } else { - return Size.zero; - } + return Size(width.toDouble(), height.toDouble()); } /// Sets the camera flash mode to [mode] by modifying the camera @@ -307,11 +325,10 @@ class Camera { /// Throws a [CameraWebException] if the torch mode is not supported /// or the camera has not been initialized or started. void setFlashMode(FlashMode mode) { - final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices; - final Map? supportedConstraints = - mediaDevices?.getSupportedConstraints(); - final bool torchModeSupported = - supportedConstraints?[_torchModeKey] as bool? ?? false; + final web.MediaDevices mediaDevices = window.navigator.mediaDevices; + final web.MediaTrackSupportedConstraints supportedConstraints = + mediaDevices.getSupportedConstraints(); + final bool torchModeSupported = supportedConstraints.torchNullable ?? false; if (!torchModeSupported) { throw CameraWebException( @@ -333,23 +350,22 @@ class Camera { /// Throws a [CameraWebException] if the torch mode is not supported /// or the camera has not been initialized or started. 
void _setTorchMode({required bool enabled}) { - final List videoTracks = - stream?.getVideoTracks() ?? []; + final List videoTracks = + stream?.getVideoTracks().toDart ?? []; if (videoTracks.isNotEmpty) { - final html.MediaStreamTrack defaultVideoTrack = videoTracks.first; - - final bool canEnableTorchMode = - defaultVideoTrack.getCapabilities()[_torchModeKey] as bool? ?? false; + final web.MediaStreamTrack defaultVideoTrack = videoTracks.first; + final bool canEnableTorchMode = defaultVideoTrack + .getCapabilities() + .torchNullable + ?.toDart + .first + .toDart ?? + false; if (canEnableTorchMode) { - defaultVideoTrack.applyConstraints({ - 'advanced': [ - { - _torchModeKey: enabled, - } - ] - }); + defaultVideoTrack.applyWebTweakConstraints( + WebTweakMediaTrackConstraints(torch: enabled.toJS)); } else { throw CameraWebException( textureId, @@ -397,13 +413,8 @@ class Camera { ); } - zoomLevelCapability.videoTrack.applyConstraints({ - 'advanced': [ - { - ZoomLevelCapability.constraintName: zoom, - } - ] - }); + zoomLevelCapability.videoTrack.applyWebTweakConstraints( + WebTweakMediaTrackConstraints(zoom: zoom.toJS)); } /// Returns a lens direction of this camera. @@ -411,21 +422,21 @@ class Camera { /// Returns null if the camera is missing a video track or /// the video track does not include the facing mode setting. CameraLensDirection? getLensDirection() { - final List videoTracks = - videoElement.srcObject?.getVideoTracks() ?? []; + final List videoTracks = + (videoElement.srcObject as web.MediaStream?)?.getVideoTracks().toDart ?? + []; if (videoTracks.isEmpty) { return null; } - final html.MediaStreamTrack defaultVideoTrack = videoTracks.first; - final Map defaultVideoTrackSettings = + final web.MediaStreamTrack defaultVideoTrack = videoTracks.first; + final web.MediaTrackSettings defaultVideoTrackSettings = defaultVideoTrack.getSettings(); - final String? facingMode = - defaultVideoTrackSettings['facingMode'] as String?; + final String? facingMode = defaultVideoTrackSettings.facingModeNullable; - if (facingMode != null) { + if (facingMode != null && facingMode.isNotEmpty) { return _cameraService.mapFacingModeToLensDirection(facingMode); } else { return null; @@ -435,65 +446,65 @@ class Camera { /// Returns the registered view type of the camera. String getViewType() => _getViewType(textureId); - /// Starts a new video recording using [html.MediaRecorder]. + /// Starts a new video recording using [web.MediaRecorder]. /// /// Throws a [CameraWebException] if the browser does not support any of the /// available video mime types from [_videoMimeType]. Future startVideoRecording() async { + final web.MediaRecorderOptions options = + web.MediaRecorderOptions(mimeType: _videoMimeType); + if (recorderOptions.audioBitrate != null) { + options.audioBitsPerSecond = recorderOptions.audioBitrate!; + } + if (recorderOptions.videoBitrate != null) { + options.videoBitsPerSecond = recorderOptions.videoBitrate!; + } + mediaRecorder ??= - html.MediaRecorder(videoElement.srcObject!, { - 'mimeType': _videoMimeType, - if (recorderOptions.audioBitrate != null) - 'audioBitsPerSecond': recorderOptions.audioBitrate!, - if (recorderOptions.videoBitrate != null) - 'videoBitsPerSecond': recorderOptions.videoBitrate!, - }); + web.MediaRecorder(videoElement.srcObject! 
as web.MediaStream, options); _videoAvailableCompleter = Completer(); _videoDataAvailableListener = - (html.Event event) => _onVideoDataAvailable(event); + (web.BlobEvent event) => _onVideoDataAvailable(event); _videoRecordingStoppedListener = - (html.Event event) => _onVideoRecordingStopped(event); + (web.Event event) => _onVideoRecordingStopped(event); mediaRecorder!.addEventListener( 'dataavailable', - _videoDataAvailableListener, + _videoDataAvailableListener?.toJS, ); mediaRecorder!.addEventListener( 'stop', - _videoRecordingStoppedListener, + _videoRecordingStoppedListener?.toJS, ); - _onVideoRecordingErrorSubscription = - mediaRecorder!.onError.listen((html.Event event) { - final html.ErrorEvent error = event as html.ErrorEvent; + _onVideoRecordingErrorSubscription = mediaRecorderOnErrorProvider + .forTarget(mediaRecorder) + .listen((web.Event event) { + final web.ErrorEvent error = event as web.ErrorEvent; videoRecordingErrorController.add(error); }); mediaRecorder!.start(); } - void _onVideoDataAvailable(html.Event event) { - final html.Blob? blob = (event as html.BlobEvent).data; - + void _onVideoDataAvailable(web.BlobEvent event) { // Append the recorded part of the video to the list of all video data files. - if (blob != null) { - _videoData.add(blob); - } + _videoData.add(event.data); } - Future _onVideoRecordingStopped(html.Event event) async { + Future _onVideoRecordingStopped(web.Event event) async { if (_videoData.isNotEmpty) { // Concatenate all video data files into a single blob. final String videoType = _videoData.first.type; - final html.Blob videoBlob = blobBuilder(_videoData, videoType); + final web.Blob videoBlob = blobBuilder(_videoData, videoType); // Create a file containing the video blob. final XFile file = XFile( - html.Url.createObjectUrl(videoBlob), + web.URL.createObjectURL(videoBlob), mimeType: _videoMimeType, name: videoBlob.hashCode.toString(), ); @@ -509,12 +520,12 @@ class Camera { // Clean up the media recorder with its event listeners and video data. mediaRecorder!.removeEventListener( 'dataavailable', - _videoDataAvailableListener, + _videoDataAvailableListener?.toJS, ); mediaRecorder!.removeEventListener( 'stop', - _videoDataAvailableListener, + _videoDataAvailableListener?.toJS, ); await _onVideoRecordingErrorSubscription?.cancel(); @@ -612,7 +623,7 @@ class Camera { ); /// Applies default styles to the video [element]. - void _applyDefaultVideoStyles(html.VideoElement element) { + void _applyDefaultVideoStyles(web.HTMLVideoElement element) { final bool isBackCamera = getLensDirection() == CameraLensDirection.back; // Flip the video horizontally if it is not taken from a back camera. diff --git a/packages/camera/camera_web/lib/src/camera_service.dart b/packages/camera/camera_web/lib/src/camera_service.dart index 8ac40ff33ee..072fe06859f 100644 --- a/packages/camera/camera_web/lib/src/camera_service.dart +++ b/packages/camera/camera_web/lib/src/camera_service.dart @@ -2,25 +2,24 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import 'dart:html' as html; +import 'dart:js_interop'; import 'package:camera_platform_interface/camera_platform_interface.dart'; import 'package:flutter/foundation.dart'; import 'package:flutter/services.dart'; +import 'package:web/web.dart' as web; import 'camera.dart'; +import 'pkg_web_tweaks.dart'; import 'shims/dart_js_util.dart'; import 'types/types.dart'; /// A service to fetch, map camera settings and /// obtain the camera stream. 
class CameraService { - // A facing mode constraint name. - static const String _facingModeKey = 'facingMode'; - /// The current browser window used to access media devices. @visibleForTesting - html.Window? window = html.window; + web.Window window = web.window; /// The utility to manipulate JavaScript interop objects. @visibleForTesting @@ -28,25 +27,19 @@ class CameraService { /// Returns a media stream associated with the camera device /// with [cameraId] and constrained by [options]. - Future getMediaStreamForOptions( + Future getMediaStreamForOptions( CameraOptions options, { int cameraId = 0, }) async { - final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices; - - // Throw a not supported exception if the current browser window - // does not support any media devices. - if (mediaDevices == null) { - throw PlatformException( - code: CameraErrorCode.notSupported.toString(), - message: 'The camera is not supported on this device.', - ); - } + final web.MediaDevices mediaDevices = window.navigator.mediaDevices; try { - final Map constraints = options.toJson(); - return await mediaDevices.getUserMedia(constraints); - } on html.DomException catch (e) { + return await mediaDevices + .getUserMedia( + options.toMediaStreamConstraints(), + ) + .toDart; + } on web.DOMException catch (e) { switch (e.name) { case 'NotFoundError': case 'DevicesNotFoundError': @@ -120,12 +113,10 @@ class CameraService { ZoomLevelCapability getZoomLevelCapabilityForCamera( Camera camera, ) { - final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices; - final Map? supportedConstraints = - mediaDevices?.getSupportedConstraints(); - final bool zoomLevelSupported = - supportedConstraints?[ZoomLevelCapability.constraintName] as bool? ?? - false; + final web.MediaDevices mediaDevices = window.navigator.mediaDevices; + final web.MediaTrackSupportedConstraints supportedConstraints = + mediaDevices.getSupportedConstraints(); + final bool zoomLevelSupported = supportedConstraints.zoomNullable ?? false; if (!zoomLevelSupported) { throw CameraWebException( @@ -135,31 +126,21 @@ class CameraService { ); } - final List videoTracks = - camera.stream?.getVideoTracks() ?? []; + final List videoTracks = + camera.stream?.getVideoTracks().toDart ?? []; if (videoTracks.isNotEmpty) { - final html.MediaStreamTrack defaultVideoTrack = videoTracks.first; + final web.MediaStreamTrack defaultVideoTrack = videoTracks.first; /// The zoom level capability is represented by MediaSettingsRange. /// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaSettingsRange - final Object zoomLevelCapability = defaultVideoTrack - .getCapabilities()[ZoomLevelCapability.constraintName] - as Object? ?? - {}; - - // The zoom level capability is a nested JS object, therefore - // we need to access its properties with the js_util library. - // See: https://api.dart.dev/stable/2.13.4/dart-js_util/getProperty.html - final num? minimumZoomLevel = - jsUtil.getProperty(zoomLevelCapability, 'min') as num?; - final num? maximumZoomLevel = - jsUtil.getProperty(zoomLevelCapability, 'max') as num?; - - if (minimumZoomLevel != null && maximumZoomLevel != null) { + final WebTweakMediaSettingsRange? 
zoomLevelCapability = + defaultVideoTrack.getCapabilities().zoomNullable; + + if (zoomLevelCapability != null) { return ZoomLevelCapability( - minimum: minimumZoomLevel.toDouble(), - maximum: maximumZoomLevel.toDouble(), + minimum: zoomLevelCapability.min, + maximum: zoomLevelCapability.max, videoTrack: defaultVideoTrack, ); } else { @@ -180,26 +161,15 @@ class CameraService { /// Returns a facing mode of the [videoTrack] /// (null if the facing mode is not available). - String? getFacingModeForVideoTrack(html.MediaStreamTrack videoTrack) { - final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices; - - // Throw a not supported exception if the current browser window - // does not support any media devices. - if (mediaDevices == null) { - throw PlatformException( - code: CameraErrorCode.notSupported.toString(), - message: 'The camera is not supported on this device.', - ); - } + String? getFacingModeForVideoTrack(web.MediaStreamTrack videoTrack) { + final web.MediaDevices mediaDevices = window.navigator.mediaDevices; // Check if the camera facing mode is supported by the current browser. - final Map supportedConstraints = + final web.MediaTrackSupportedConstraints supportedConstraints = mediaDevices.getSupportedConstraints(); - final bool facingModeSupported = - supportedConstraints[_facingModeKey] as bool? ?? false; // Return null if the facing mode is not supported. - if (!facingModeSupported) { + if (!supportedConstraints.facingMode) { return null; } @@ -209,10 +179,10 @@ class CameraService { // // MediaTrackSettings: // https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings - final Map videoTrackSettings = videoTrack.getSettings(); - final String? facingMode = videoTrackSettings[_facingModeKey] as String?; + final web.MediaTrackSettings videoTrackSettings = videoTrack.getSettings(); + final String? facingMode = videoTrackSettings.facingModeNullable; - if (facingMode == null) { + if (facingMode == null || facingMode.isEmpty) { // If the facing mode does not exist in the video track settings, // check for the facing mode in the video track capabilities. // @@ -223,20 +193,20 @@ class CameraService { // // The method may not be supported on Firefox. // See: https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/getCapabilities#browser_compatibility - if (!jsUtil.hasProperty(videoTrack, 'getCapabilities')) { + if (!jsUtil.hasProperty(videoTrack, 'getCapabilities'.toJS)) { // Return null if the video track capabilities are not supported. return null; } - final Map videoTrackCapabilities = + final web.MediaTrackCapabilities videoTrackCapabilities = videoTrack.getCapabilities(); // A list of facing mode capabilities as // the camera may support multiple facing modes. - final List facingModeCapabilities = List.from( - (videoTrackCapabilities[_facingModeKey] as List?) - ?.cast() ?? - []); + final List facingModeCapabilities = videoTrackCapabilities + .facingMode.toDart + .map((JSString e) => e.toDart) + .toList(); if (facingModeCapabilities.isNotEmpty) { final String facingModeCapability = facingModeCapabilities.first; diff --git a/packages/camera/camera_web/lib/src/camera_web.dart b/packages/camera/camera_web/lib/src/camera_web.dart index 799d742533a..11d14316971 100644 --- a/packages/camera/camera_web/lib/src/camera_web.dart +++ b/packages/camera/camera_web/lib/src/camera_web.dart @@ -3,7 +3,7 @@ // found in the LICENSE file. 
import 'dart:async'; -import 'dart:html' as html; +import 'dart:js_interop'; import 'dart:math'; import 'package:camera_platform_interface/camera_platform_interface.dart'; @@ -11,9 +11,11 @@ import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:flutter_web_plugins/flutter_web_plugins.dart'; import 'package:stream_transform/stream_transform.dart'; +import 'package:web/web.dart' as web; import 'camera.dart'; import 'camera_service.dart'; +import 'pkg_web_tweaks.dart'; import 'types/types.dart'; // The default error message, when the error is an empty string. @@ -58,81 +60,86 @@ class CameraPlugin extends CameraPlatform { final StreamController cameraEventStreamController = StreamController.broadcast(); - final Map> - _cameraVideoErrorSubscriptions = >{}; + /// The stream provider for [web.HTMLVideoElement] error events. + @visibleForTesting + web.EventStreamProvider videoElementOnErrorProvider = + web.EventStreamProviders.errorElementEvent; + + final Map> _cameraVideoErrorSubscriptions = + >{}; - final Map> - _cameraVideoAbortSubscriptions = >{}; + /// The stream provider for [web.HTMLVideoElement] abort events. + @visibleForTesting + web.EventStreamProvider videoElementOnAbortProvider = + web.EventStreamProviders.errorElementEvent; - final Map> + final Map> _cameraVideoAbortSubscriptions = + >{}; + + final Map> _cameraEndedSubscriptions = - >{}; + >{}; - final Map> + final Map> _cameraVideoRecordingErrorSubscriptions = - >{}; + >{}; /// Returns a stream of camera events for the given [cameraId]. Stream _cameraEvents(int cameraId) => cameraEventStreamController.stream .where((CameraEvent event) => event.cameraId == cameraId); + /// The stream provider for [web.ScreenOrientation] change events. + @visibleForTesting + web.EventStreamProvider orientationOnChangeProvider = + web.EventStreamProviders.changeEvent; + /// The current browser window used to access media devices. @visibleForTesting - html.Window? window = html.window; + web.Window window = web.window; @override Future> availableCameras() async { try { - final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices; + final web.MediaDevices mediaDevices = window.navigator.mediaDevices; final List cameras = []; - // Throw a not supported exception if the current browser window - // does not support any media devices. - if (mediaDevices == null) { - throw PlatformException( - code: CameraErrorCode.notSupported.toString(), - message: 'The camera is not supported on this device.', - ); - } - // Request video permissions only. - final html.MediaStream cameraStream = + final web.MediaStream cameraStream = await _cameraService.getMediaStreamForOptions(const CameraOptions()); // Release the camera stream used to request video permissions. cameraStream .getVideoTracks() - .forEach((html.MediaStreamTrack videoTrack) => videoTrack.stop()); + .toDart + .forEach((web.MediaStreamTrack videoTrack) => videoTrack.stop()); // Request available media devices. - final List devices = await mediaDevices.enumerateDevices(); + final List devices = + (await mediaDevices.enumerateDevices().toDart).toDart; // Filter video input devices. 
- final Iterable videoInputDevices = devices - .whereType() - .where((html.MediaDeviceInfo device) => - device.kind == MediaDeviceKind.videoInput) + final Iterable videoInputDevices = devices + .where( + (web.MediaDeviceInfo device) => + device.kind == MediaDeviceKind.videoInput, + ) /// The device id property is currently not supported on Internet Explorer: /// https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/deviceId#browser_compatibility - .where( - (html.MediaDeviceInfo device) => - device.deviceId != null && device.deviceId!.isNotEmpty, - ); + .where((web.MediaDeviceInfo device) => device.deviceId.isNotEmpty); // Map video input devices to camera descriptions. - for (final html.MediaDeviceInfo videoInputDevice in videoInputDevices) { + for (final web.MediaDeviceInfo videoInputDevice in videoInputDevices) { // Get the video stream for the current video input device // to later use for the available video tracks. - final html.MediaStream videoStream = await _getVideoStreamForDevice( - videoInputDevice.deviceId!, - ); + final web.MediaStream videoStream = + await _getVideoStreamForDevice(videoInputDevice.deviceId); // Get all video tracks in the video stream // to later extract the lens direction from the first track. - final List videoTracks = - videoStream.getVideoTracks(); + final List videoTracks = + videoStream.getVideoTracks().toDart; if (videoTracks.isNotEmpty) { // Get the facing mode from the first available video track. @@ -155,15 +162,14 @@ class CameraPlugin extends CameraPlatform { // https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/label // // Sensor orientation is currently not supported. - final String cameraLabel = videoInputDevice.label ?? ''; final CameraDescription camera = CameraDescription( - name: cameraLabel, + name: videoInputDevice.label, lensDirection: lensDirection, sensorOrientation: 0, ); final CameraMetadata cameraMetadata = CameraMetadata( - deviceId: videoInputDevice.deviceId!, + deviceId: videoInputDevice.deviceId, facingMode: facingMode, ); @@ -172,7 +178,7 @@ class CameraPlugin extends CameraPlatform { camerasMetadata[camera] = cameraMetadata; // Release the camera stream of the current video input device. - for (final html.MediaStreamTrack videoTrack in videoTracks) { + for (final web.MediaStreamTrack videoTrack in videoTracks) { videoTrack.stop(); } } else { @@ -182,7 +188,7 @@ class CameraPlugin extends CameraPlatform { } return cameras; - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw CameraException(e.name, e.message); } on PlatformException catch (e) { throw CameraException(e.code, e.message); @@ -278,14 +284,15 @@ class CameraPlugin extends CameraPlatform { // Add camera's video error events to the camera events stream. // The error event fires when the video element's source has failed to load, or can't be used. - _cameraVideoErrorSubscriptions[cameraId] = - camera.videoElement.onError.listen((html.Event _) { + _cameraVideoErrorSubscriptions[cameraId] = videoElementOnErrorProvider + .forElement(camera.videoElement) + .listen((web.Event _) { // The Event itself (_) doesn't contain information about the actual error. // We need to look at the HTMLMediaElement.error. // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error - final html.MediaError error = camera.videoElement.error!; + final web.MediaError error = camera.videoElement.error!; final CameraErrorCode errorCode = CameraErrorCode.fromMediaError(error); - final String? 
errorMessage = + final String errorMessage = error.message != '' ? error.message : _kDefaultErrorMessage; cameraEventStreamController.add( @@ -298,8 +305,9 @@ class CameraPlugin extends CameraPlatform { // Add camera's video abort events to the camera events stream. // The abort event fires when the video element's source has not fully loaded. - _cameraVideoAbortSubscriptions[cameraId] = - camera.videoElement.onAbort.listen((html.Event _) { + _cameraVideoAbortSubscriptions[cameraId] = videoElementOnAbortProvider + .forElement(camera.videoElement) + .listen((web.Event _) { cameraEventStreamController.add( CameraErrorEvent( cameraId, @@ -313,7 +321,7 @@ class CameraPlugin extends CameraPlatform { // Add camera's closing events to the camera events stream. // The onEnded stream fires when there is no more camera stream data. _cameraEndedSubscriptions[cameraId] = - camera.onEnded.listen((html.MediaStreamTrack _) { + camera.onEnded.listen((web.MediaStreamTrack _) { cameraEventStreamController.add( CameraClosingEvent(cameraId), ); @@ -334,7 +342,7 @@ class CameraPlugin extends CameraPlatform { false, ), ); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -374,23 +382,22 @@ class CameraPlugin extends CameraPlatform { @override Stream onDeviceOrientationChanged() { - final html.ScreenOrientation? orientation = window?.screen?.orientation; - - if (orientation != null) { - // Create an initial orientation event that emits the device orientation - // as soon as subscribed to this stream. - final html.Event initialOrientationEvent = html.Event('change'); - - return orientation.onChange.startWith(initialOrientationEvent).map( - (html.Event _) { - final DeviceOrientation deviceOrientation = _cameraService - .mapOrientationTypeToDeviceOrientation(orientation.type!); - return DeviceOrientationChangedEvent(deviceOrientation); - }, - ); - } else { - return const Stream.empty(); - } + final web.ScreenOrientation orientation = window.screen.orientation; + + // Create an initial orientation event that emits the device orientation + // as soon as subscribed to this stream. + final web.Event initialOrientationEvent = web.Event('change'); + + return orientationOnChangeProvider + .forTarget(orientation) + .startWith(initialOrientationEvent) + .map( + (web.Event _) { + final DeviceOrientation deviceOrientation = _cameraService + .mapOrientationTypeToDeviceOrientation(orientation.type); + return DeviceOrientationChangedEvent(deviceOrientation); + }, + ); } @override @@ -399,11 +406,10 @@ class CameraPlugin extends CameraPlatform { DeviceOrientation orientation, ) async { try { - final html.ScreenOrientation? screenOrientation = - window?.screen?.orientation; - final html.Element? documentElement = window?.document.documentElement; + final web.ScreenOrientation screenOrientation = window.screen.orientation; + final web.Element? documentElement = window.document.documentElement; - if (screenOrientation != null && documentElement != null) { + if (documentElement != null) { final String orientationType = _cameraService.mapDeviceOrientationToOrientationType(orientation); @@ -411,16 +417,16 @@ class CameraPlugin extends CameraPlatform { // See: https://w3c.github.io/screen-orientation/#interaction-with-fullscreen-api // Recent versions of Dart changed requestFullscreen to return a Future instead of void. // This wrapper allows use of both the old and new APIs. 
- dynamic fullScreen() => documentElement.requestFullscreen(); + dynamic fullScreen() => documentElement.requestFullScreenTweak(); await fullScreen(); - await screenOrientation.lock(orientationType); + await screenOrientation.lock(orientationType).toDart; } else { throw PlatformException( code: CameraErrorCode.orientationNotSupported.toString(), message: 'Orientation is not supported in the current browser.', ); } - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } } @@ -428,10 +434,10 @@ class CameraPlugin extends CameraPlatform { @override Future unlockCaptureOrientation(int cameraId) async { try { - final html.ScreenOrientation? orientation = window?.screen?.orientation; - final html.Element? documentElement = window?.document.documentElement; + final web.ScreenOrientation orientation = window.screen.orientation; + final web.Element? documentElement = window.document.documentElement; - if (orientation != null && documentElement != null) { + if (documentElement != null) { orientation.unlock(); } else { throw PlatformException( @@ -439,7 +445,7 @@ class CameraPlugin extends CameraPlatform { message: 'Orientation is not supported in the current browser.', ); } - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } } @@ -448,7 +454,7 @@ class CameraPlugin extends CameraPlatform { Future takePicture(int cameraId) { try { return getCamera(cameraId).takePicture(); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -480,7 +486,7 @@ class CameraPlugin extends CameraPlatform { // The error event fires when the video recording is not allowed or an unsupported // codec is used. 
_cameraVideoRecordingErrorSubscriptions[options.cameraId] = - camera.onVideoRecordingError.listen((html.ErrorEvent errorEvent) { + camera.onVideoRecordingError.listen((web.ErrorEvent errorEvent) { cameraEventStreamController.add( CameraErrorEvent( options.cameraId, @@ -490,7 +496,7 @@ class CameraPlugin extends CameraPlatform { }); return camera.startVideoRecording(); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -505,7 +511,7 @@ class CameraPlugin extends CameraPlatform { await getCamera(cameraId).stopVideoRecording(); await _cameraVideoRecordingErrorSubscriptions[cameraId]?.cancel(); return videoRecording; - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -517,7 +523,7 @@ class CameraPlugin extends CameraPlatform { Future pauseVideoRecording(int cameraId) { try { return getCamera(cameraId).pauseVideoRecording(); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -529,7 +535,7 @@ class CameraPlugin extends CameraPlatform { Future resumeVideoRecording(int cameraId) { try { return getCamera(cameraId).resumeVideoRecording(); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -541,7 +547,7 @@ class CameraPlugin extends CameraPlatform { Future setFlashMode(int cameraId, FlashMode mode) async { try { getCamera(cameraId).setFlashMode(mode); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -593,7 +599,7 @@ class CameraPlugin extends CameraPlatform { Future getMaxZoomLevel(int cameraId) async { try { return getCamera(cameraId).getMaxZoomLevel(); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -605,7 +611,7 @@ class CameraPlugin extends CameraPlatform { Future getMinZoomLevel(int cameraId) async { try { return getCamera(cameraId).getMinZoomLevel(); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -617,7 +623,7 @@ class CameraPlugin extends CameraPlatform { Future setZoomLevel(int cameraId, double zoom) async { try { getCamera(cameraId).setZoomLevel(zoom); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw CameraException(e.name, e.message); } on PlatformException catch (e) { throw CameraException(e.code, e.message); @@ -631,7 +637,7 @@ class CameraPlugin extends CameraPlatform { Future pausePreview(int cameraId) async { try { getCamera(cameraId).pause(); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } } @@ -640,7 +646,7 @@ class CameraPlugin extends CameraPlatform { Future resumePreview(int cameraId) async { try { await getCamera(cameraId).play(); - } on html.DomException catch 
(e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } on CameraWebException catch (e) { _addCameraErrorEvent(e); @@ -661,6 +667,7 @@ class CameraPlugin extends CameraPlatform { await getCamera(cameraId).dispose(); await _cameraVideoErrorSubscriptions[cameraId]?.cancel(); await _cameraVideoAbortSubscriptions[cameraId]?.cancel(); + await _cameraEndedSubscriptions[cameraId]?.cancel(); await _cameraVideoRecordingErrorSubscriptions[cameraId]?.cancel(); @@ -668,13 +675,13 @@ class CameraPlugin extends CameraPlatform { _cameraVideoErrorSubscriptions.remove(cameraId); _cameraVideoAbortSubscriptions.remove(cameraId); _cameraEndedSubscriptions.remove(cameraId); - } on html.DomException catch (e) { + } on web.DOMException catch (e) { throw PlatformException(code: e.name, message: e.message); } } /// Returns a media video stream for the device with the given [deviceId]. - Future _getVideoStreamForDevice( + Future _getVideoStreamForDevice( String deviceId, ) { // Create camera options with the desired device id. diff --git a/packages/camera/camera_web/lib/src/pkg_web_tweaks.dart b/packages/camera/camera_web/lib/src/pkg_web_tweaks.dart new file mode 100644 index 00000000000..fb0e84ef377 --- /dev/null +++ b/packages/camera/camera_web/lib/src/pkg_web_tweaks.dart @@ -0,0 +1,74 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// ignore_for_file: public_member_api_docs + +import 'dart:js_interop'; + +import 'package:web/web.dart'; + +/// Adds missing fields to [Element]. +extension FullScreenSupportMethods on Element { + @JS('requestFullscreen') + external JSPromise requestFullScreenTweak([JSAny options]); +} + +/// Adds missing fields to [MediaTrackSupportedConstraints]. +extension NonStandardFieldsOnMediaTrackSupportedConstraints + on MediaTrackSupportedConstraints { + @JS('zoom') + external bool? get zoomNullable; + + @JS('torch') + external bool? get torchNullable; +} + +/// Adds missing fields to [MediaTrackCapabilities]. +extension NonStandardFieldsOnMediaTrackCapabilities on MediaTrackCapabilities { + @JS('zoom') + external WebTweakMediaSettingsRange? get zoomNullable; + + @JS('torch') + external JSArray? get torchNullable; +} + +/// Adds missing fields to [MediaTrackSettings] +extension NonStandardFieldsOnMediaTrackSettings on MediaTrackSettings { + @JS('facingMode') + external String? get facingModeNullable; +} + +/// Brought over from package:web 1.0.0 +extension type WebTweakMediaSettingsRange._(JSObject _) implements JSObject { + @JS('MediaSettingsRange') + external factory WebTweakMediaSettingsRange({ + num max, + num min, + num step, + }); + + external double get max; + external set max(num value); + external double get min; + external set min(num value); + external double get step; + external set step(num value); +} + +/// Adds an applyConstraints method that accepts the WebTweakMediaTrackConstraints. +extension WebTweakMethodVersions on MediaStreamTrack { + @JS('applyConstraints') + external JSPromise applyWebTweakConstraints( + [WebTweakMediaTrackConstraints constraints]); +} + +/// Allows creating the MediaTrackConstraints that are needed. 
+/// Brought over from package:web 1.0.0 +extension type WebTweakMediaTrackConstraints._(JSObject _) implements JSObject { + @JS('MediaTrackConstraints') + external factory WebTweakMediaTrackConstraints({ + JSAny zoom, + ConstrainBoolean torch, + }); +} diff --git a/packages/camera/camera_web/lib/src/shims/dart_js_util.dart b/packages/camera/camera_web/lib/src/shims/dart_js_util.dart index 7d766e8c269..50cda211725 100644 --- a/packages/camera/camera_web/lib/src/shims/dart_js_util.dart +++ b/packages/camera/camera_web/lib/src/shims/dart_js_util.dart @@ -2,14 +2,14 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import 'dart:js_util' as js_util; +import 'dart:js_interop'; +import 'dart:js_interop_unsafe'; -/// A utility that shims dart:js_util to manipulate JavaScript interop objects. +/// A utility that shims dart:js_interop to manipulate JavaScript interop objects. class JsUtil { /// Returns true if the object [o] has the property [name]. - bool hasProperty(Object o, Object name) => js_util.hasProperty(o, name); + bool hasProperty(JSObject o, JSAny name) => o.hasProperty(name).toDart; /// Returns the value of the property [name] in the object [o]. - dynamic getProperty(Object o, Object name) => - js_util.getProperty(o, name); + JSAny? getProperty(JSObject o, JSAny name) => o.getProperty(name); } diff --git a/packages/camera/camera_web/lib/src/types/camera_error_code.dart b/packages/camera/camera_web/lib/src/types/camera_error_code.dart index 8f1831f79cf..6619e5fb388 100644 --- a/packages/camera/camera_web/lib/src/types/camera_error_code.dart +++ b/packages/camera/camera_web/lib/src/types/camera_error_code.dart @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import 'dart:html' as html; +import 'package:web/web.dart' as web; /// Error codes that may occur during the camera initialization, /// configuration or video streaming. @@ -78,18 +78,17 @@ class CameraErrorCode { /// Returns a camera error code based on the media error. 
/// /// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code - static CameraErrorCode fromMediaError(html.MediaError error) { - switch (error.code) { - case html.MediaError.MEDIA_ERR_ABORTED: - return const CameraErrorCode._('mediaErrorAborted'); - case html.MediaError.MEDIA_ERR_NETWORK: - return const CameraErrorCode._('mediaErrorNetwork'); - case html.MediaError.MEDIA_ERR_DECODE: - return const CameraErrorCode._('mediaErrorDecode'); - case html.MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED: - return const CameraErrorCode._('mediaErrorSourceNotSupported'); - default: - return const CameraErrorCode._('mediaErrorUnknown'); + static CameraErrorCode fromMediaError(web.MediaError error) { + if (error.code == web.MediaError.MEDIA_ERR_ABORTED) { + return const CameraErrorCode._('mediaErrorAborted'); + } else if (error.code == web.MediaError.MEDIA_ERR_NETWORK) { + return const CameraErrorCode._('mediaErrorNetwork'); + } else if (error.code == web.MediaError.MEDIA_ERR_DECODE) { + return const CameraErrorCode._('mediaErrorDecode'); + } else if (error.code == web.MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED) { + return const CameraErrorCode._('mediaErrorSourceNotSupported'); + } else { + return const CameraErrorCode._('mediaErrorUnknown'); } } } diff --git a/packages/camera/camera_web/lib/src/types/camera_options.dart b/packages/camera/camera_web/lib/src/types/camera_options.dart index ecb729d7454..45b446215a8 100644 --- a/packages/camera/camera_web/lib/src/types/camera_options.dart +++ b/packages/camera/camera_web/lib/src/types/camera_options.dart @@ -2,7 +2,10 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +import 'dart:js_interop'; + import 'package:flutter/foundation.dart'; +import 'package:web/web.dart' as web; /// Options used to create a camera with the given /// [audio] and [video] media constraints. @@ -28,12 +31,12 @@ class CameraOptions { /// The video constraints for the camera. final VideoConstraints video; - /// Converts the current instance to a Map. - Map toJson() { - return { - 'audio': audio.toJson(), - 'video': video.toJson(), - }; + /// Converts `this` to something that can be used by the browser. + web.MediaStreamConstraints toMediaStreamConstraints() { + return web.MediaStreamConstraints( + audio: audio.toMediaStreamConstraints(), + video: video.toMediaStreamConstraints(), + ); } @override @@ -63,8 +66,8 @@ class AudioConstraints { /// Whether the audio track should be enabled. final bool enabled; - /// Converts the current instance to a Map. - Object toJson() => enabled; + /// Convert `this` to something that can be used on the browser. + JSAny toMediaStreamConstraints() => enabled.toJS; @override bool operator ==(Object other) { @@ -104,24 +107,15 @@ class VideoConstraints { /// The device id of the video track. final String? deviceId; - /// Converts the current instance to a Map. - Object toJson() { - final Map json = {}; - - if (width != null) { - json['width'] = width!.toJson(); - } - if (height != null) { - json['height'] = height!.toJson(); - } - if (facingMode != null) { - json['facingMode'] = facingMode!.toJson(); - } - if (deviceId != null) { - json['deviceId'] = {'exact': deviceId!}; - } - - return json; + // TODO(dit): package:web has a class for this. Use it instead of jsify and toJson. + /// Convert `this` to something that can be used on the browser. 
+ JSAny toMediaStreamConstraints() { + return { + if (width != null) 'width': width!.toJson(), + if (height != null) 'height': height!.toJson(), + if (facingMode != null) 'facingMode': facingMode!.toJson(), + if (deviceId != null) 'deviceId': {'exact': deviceId!}, + }.jsify()!; } @override @@ -162,6 +156,7 @@ enum CameraType { String toString() => _type; } +// TODO(dit): package:web has a class for this. Use it instead of toJson. /// Indicates the direction in which the desired camera should be pointing. @immutable class FacingModeConstraint { @@ -191,6 +186,7 @@ class FacingModeConstraint { /// the desired facing [type] to be considered acceptable. final CameraType? exact; + // TODO(dit): package:web has a class for this. Use it instead of toJson. /// Converts the current instance to a Map. Object toJson() { return { @@ -214,6 +210,7 @@ class FacingModeConstraint { int get hashCode => Object.hash(ideal, exact); } +// TODO(dit): package:web has a class for this. Use it instead of toJson. /// The size of the requested video track used in /// [VideoConstraints.width] and [VideoConstraints.height]. /// @@ -240,6 +237,7 @@ class VideoSizeConstraint { /// The maximum video size. final int? maximum; + // TODO(dit): package:web has a class for this. Use it instead of toJson. /// Converts the current instance to a Map. Object toJson() { final Map json = {}; diff --git a/packages/camera/camera_web/lib/src/types/zoom_level_capability.dart b/packages/camera/camera_web/lib/src/types/zoom_level_capability.dart index d20bd25108b..71e250b3a8a 100644 --- a/packages/camera/camera_web/lib/src/types/zoom_level_capability.dart +++ b/packages/camera/camera_web/lib/src/types/zoom_level_capability.dart @@ -2,9 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import 'dart:html' as html; - import 'package:flutter/foundation.dart'; +import 'package:web/web.dart' as web; /// The possible range of values for the zoom level configurable /// on the camera video track. @@ -30,7 +29,7 @@ class ZoomLevelCapability { final double maximum; /// The video track capable of configuring the zoom level. - final html.MediaStreamTrack videoTrack; + final web.MediaStreamTrack videoTrack; @override bool operator ==(Object other) { diff --git a/packages/camera/camera_web/pubspec.yaml b/packages/camera/camera_web/pubspec.yaml index e33a2e0fd28..f439c625036 100644 --- a/packages/camera/camera_web/pubspec.yaml +++ b/packages/camera/camera_web/pubspec.yaml @@ -2,11 +2,11 @@ name: camera_web description: A Flutter plugin for getting information about and controlling the camera on Web. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_web issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.3.4 +version: 0.3.5 environment: - sdk: ^3.2.0 - flutter: ">=3.16.0" + sdk: ^3.3.0 + flutter: ">=3.19.0" flutter: plugin: @@ -23,6 +23,7 @@ dependencies: flutter_web_plugins: sdk: flutter stream_transform: ^2.0.0 + web: ">=0.5.1 <2.0.0" dev_dependencies: flutter_test: diff --git a/script/configs/exclude_all_packages_app_wasm.yaml b/script/configs/exclude_all_packages_app_wasm.yaml index cad06d3043a..8a02d4e4799 100644 --- a/script/configs/exclude_all_packages_app_wasm.yaml +++ b/script/configs/exclude_all_packages_app_wasm.yaml @@ -5,7 +5,4 @@ # This is only used for wasm compilation. Once all packages in the repo have # been migrated, remove this file and use `exclude_all_packages_app.yaml` only. 
-# Packages that aren't migrated yet. -# https://github.com/flutter/flutter/issues/117022 -- camera - +[] # Needed so the contents of this file are an empty array, not `null`!
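A minimal usage sketch of the new `pkg_web_tweaks.dart` extensions (reviewer note, not part of the diff): it assumes a live video `web.MediaStreamTrack` obtained from an already-started camera stream, and mirrors how `Camera.setFlashMode` and `Camera._setTorchMode` now feature-detect and apply the non-standard `torch` constraint through typed extension members instead of string-keyed maps. The `package:camera_web/src/...` import is an internal path used here purely for illustration.

```dart
// Sketch only: exercises the tweak extensions added in this PR.
// `track` is assumed to be a live video MediaStreamTrack from the camera.
import 'dart:js_interop';

import 'package:camera_web/src/pkg_web_tweaks.dart';
import 'package:web/web.dart' as web;

Future<void> enableTorchIfSupported(web.MediaStreamTrack track) async {
  // Feature-detect torch support via the nullable tweak getter.
  final bool torchSupported = web.window.navigator.mediaDevices
          .getSupportedConstraints()
          .torchNullable ??
      false;
  if (!torchSupported) {
    return;
  }
  // Apply the constraint through the typed wrapper rather than a raw JS map.
  await track
      .applyWebTweakConstraints(
        WebTweakMediaTrackConstraints(torch: true.toJS),
      )
      .toDart;
}
```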
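A second sketch, also an assumption rather than part of the diff, showing the new constraint plumbing end to end: `CameraOptions.toMediaStreamConstraints()` now yields a `web.MediaStreamConstraints` that can be passed directly to `package:web`'s promise-based `getUserMedia`, which is what `CameraService.getMediaStreamForOptions` does above.

```dart
// Sketch only: requesting a camera stream with the migrated option types.
import 'dart:js_interop';

import 'package:camera_web/src/types/types.dart';
import 'package:web/web.dart' as web;

Future<web.MediaStream> requestDefaultCameraStream() {
  // Default options request video only, as in the availableCameras() change.
  const CameraOptions options = CameraOptions();
  return web.window.navigator.mediaDevices
      .getUserMedia(options.toMediaStreamConstraints())
      .toDart;
}
```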