diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index 53a6c4ca1ad..d018f494199 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.11.0+2
+
+* Updates minimum supported SDK version to Flutter 3.19/Dart 3.3.
+
## 0.11.0+1
* Updates minimum supported SDK version to Flutter 3.16/Dart 3.2.
diff --git a/packages/camera/camera/example/pubspec.yaml b/packages/camera/camera/example/pubspec.yaml
index f3b240b56c2..54f2cebd4fe 100644
--- a/packages/camera/camera/example/pubspec.yaml
+++ b/packages/camera/camera/example/pubspec.yaml
@@ -28,5 +28,9 @@ dev_dependencies:
integration_test:
sdk: flutter
+dependency_overrides:
+ camera_web:
+ path: ../../camera_web
+
flutter:
uses-material-design: true
diff --git a/packages/camera/camera/example/web/index.html b/packages/camera/camera/example/web/index.html
index 91502587eda..0dd4a04db41 100644
--- a/packages/camera/camera/example/web/index.html
+++ b/packages/camera/camera/example/web/index.html
@@ -3,8 +3,9 @@
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file. -->
-
+
+
@@ -16,14 +17,12 @@
-
+
Camera Web Example
-
-
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index f00a7e798f0..931f091bbdc 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -4,11 +4,11 @@ description: A Flutter plugin for controlling the camera. Supports previewing
the camera feed, capturing images and video, and streaming image buffers to
Dart.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.11.0+1
+version: 0.11.0+2
environment:
- sdk: ^3.2.3
- flutter: ">=3.16.6"
+ sdk: ^3.3.0
+ flutter: ">=3.19.0"
flutter:
plugin:
diff --git a/packages/camera/camera_web/CHANGELOG.md b/packages/camera/camera_web/CHANGELOG.md
index c9e8e661da4..d508877041c 100644
--- a/packages/camera/camera_web/CHANGELOG.md
+++ b/packages/camera/camera_web/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.3.5
+
+* Migrates to `package:web` to support WASM.
+* Updates minimum supported SDK version to Flutter 3.19/Dart 3.3.
+
## 0.3.4
* Removes `maxVideoDuration`/`maxDuration`, as the feature was never exposed at
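The changelog entry above covers a mechanical but wide-reaching change: every test below swaps `dart:html` types for `package:web` extension types. The sketch below is illustrative only and not part of this diff (the `makeCanvas` helper is hypothetical); it mirrors the element-constructor and `fillStyle` usage that appears in the hunks that follow.

```dart
import 'dart:js_interop';

import 'package:web/web.dart';

// Hypothetical helper, not from this PR: shows how dart:html idioms map onto
// package:web. Element constructors become the tag-named HTML* classes, and
// JS-typed properties such as fillStyle take JS values produced with `.toJS`.
HTMLCanvasElement makeCanvas(int width, int height) {
  // dart:html equivalent: CanvasElement(width: width, height: height)
  final HTMLCanvasElement canvas = HTMLCanvasElement()
    ..width = width
    ..height = height;
  // dart:html equivalent: canvas.context2D.setFillColorRgb(255, 0, 0)
  final CanvasRenderingContext2D ctx =
      canvas.getContext('2d')! as CanvasRenderingContext2D;
  ctx.fillStyle = 'rgba(255, 0, 0, 1)'.toJS;
  ctx.fillRect(0, 0, width, height);
  return canvas;
}
```

Because `package:web` types are extension types over `JSObject`, the migrated tests can no longer hand mocktail mocks directly to code that expects them; the hunks below wrap plain Dart mocks with `createJSInteropWrapper` instead.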
diff --git a/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart b/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart
index 3bf946029c2..3e2c9bd40c9 100644
--- a/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_bitrate_test.dart
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-import 'dart:html';
+import 'dart:js_interop';
import 'dart:math';
import 'dart:ui';
@@ -13,6 +13,7 @@ import 'package:camera_web/src/types/types.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:integration_test/integration_test.dart';
import 'package:mocktail/mocktail.dart';
+import 'package:web/web.dart';
import 'helpers/helpers.dart';
@@ -22,7 +23,7 @@ void main() {
const Size videoSize = Size(320, 240);
/// Draw some seconds of random video frames on canvas in realtime.
- Future<void> simulateCamera(CanvasElement canvasElement) async {
+ Future<void> simulateCamera(HTMLCanvasElement canvasElement) async {
const int fps = 15;
const int seconds = 3;
const int frameDuration = 1000 ~/ fps;
@@ -34,8 +35,10 @@ void main() {
final int h = videoSize.height ~/ 20;
for (int y = 0; y < videoSize.height; y += h) {
for (int x = 0; x < videoSize.width; x += w) {
- canvasElement.context2D.setFillColorRgb(
- random.nextInt(255), random.nextInt(255), random.nextInt(255));
+ final int r = random.nextInt(255);
+ final int g = random.nextInt(255);
+ final int b = random.nextInt(255);
+ canvasElement.context2D.fillStyle = 'rgba($r, $g, $b, 1)'.toJS;
canvasElement.context2D.fillRect(x, y, w, h);
}
}
@@ -53,19 +56,25 @@ void main() {
bool isVideoTypeSupported(String type) => type == supportedVideoType;
Future<void> recordVideo(int videoBitrate) async {
- final Window window = MockWindow();
- final Navigator navigator = MockNavigator();
- final MediaDevices mediaDevices = MockMediaDevices();
+ final MockWindow mockWindow = MockWindow();
+ final MockNavigator mockNavigator = MockNavigator();
+ final MockMediaDevices mockMediaDevices = MockMediaDevices();
- when(() => window.navigator).thenReturn(navigator);
- when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+ final Window window = createJSInteropWrapper(mockWindow) as Window;
+ final Navigator navigator =
+ createJSInteropWrapper(mockNavigator) as Navigator;
+ final MediaDevices mediaDevices =
+ createJSInteropWrapper(mockMediaDevices) as MediaDevices;
- final CanvasElement canvasElement = CanvasElement(
- width: videoSize.width.toInt(),
- height: videoSize.height.toInt(),
- )..context2D.clearRect(0, 0, videoSize.width, videoSize.height);
+ mockWindow.navigator = navigator;
+ mockNavigator.mediaDevices = mediaDevices;
- final VideoElement videoElement = VideoElement();
+ final HTMLCanvasElement canvasElement = HTMLCanvasElement()
+ ..width = videoSize.width.toInt()
+ ..height = videoSize.height.toInt()
+ ..context2D.clearRect(0, 0, videoSize.width, videoSize.height);
+
+ final HTMLVideoElement videoElement = HTMLVideoElement();
final MockCameraService cameraService = MockCameraService();
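The hunk above shows the wrapper pattern that replaces mocktail stubbing of `Window`/`Navigator`/`MediaDevices`: the `Mock*` classes from `helpers/helpers.dart` (not shown in this diff) are plain Dart objects, wrapped with `createJSInteropWrapper` so they can be passed where `package:web` types are expected. A sketch under the assumption that those helpers are `@JSExport`-annotated classes with settable fields:

```dart
import 'dart:js_interop';

import 'package:web/web.dart' as web;

// Assumed shape of the test helpers; the real MockWindow/MockNavigator live in
// helpers/helpers.dart and may differ.
@JSExport()
class MockNavigator {
  web.MediaDevices? mediaDevices;
}

@JSExport()
class MockWindow {
  web.Navigator? navigator;
}

web.Window buildMockWindow() {
  final MockWindow mockWindow = MockWindow();
  final MockNavigator mockNavigator = MockNavigator();

  // The wrapper is a JS object whose properties forward to the exported Dart
  // members, so the cast to the package:web extension type is valid.
  mockWindow.navigator =
      createJSInteropWrapper(mockNavigator) as web.Navigator;
  return createJSInteropWrapper(mockWindow) as web.Window;
}
```

The resulting `web.Window` can then be assigned to the code under test, exactly as the setUp blocks in the hunks above and below do.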
diff --git a/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
index 6bd86b0fe8e..4adb5cc4046 100644
--- a/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
@@ -2,12 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-import 'dart:html';
-
// ignore: implementation_imports
+import 'dart:js_interop';
+
import 'package:camera_web/src/types/types.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:integration_test/integration_test.dart';
+import 'package:web/web.dart';
import 'helpers/helpers.dart';
@@ -132,7 +133,8 @@ void main() {
testWidgets('with aborted error code', (WidgetTester tester) async {
expect(
CameraErrorCode.fromMediaError(
- FakeMediaError(MediaError.MEDIA_ERR_ABORTED),
+ createJSInteropWrapper(
+ FakeMediaError(MediaError.MEDIA_ERR_ABORTED)) as MediaError,
).toString(),
equals('mediaErrorAborted'),
);
@@ -141,7 +143,8 @@ void main() {
testWidgets('with network error code', (WidgetTester tester) async {
expect(
CameraErrorCode.fromMediaError(
- FakeMediaError(MediaError.MEDIA_ERR_NETWORK),
+ createJSInteropWrapper(
+ FakeMediaError(MediaError.MEDIA_ERR_NETWORK)) as MediaError,
).toString(),
equals('mediaErrorNetwork'),
);
@@ -150,7 +153,8 @@ void main() {
testWidgets('with decode error code', (WidgetTester tester) async {
expect(
CameraErrorCode.fromMediaError(
- FakeMediaError(MediaError.MEDIA_ERR_DECODE),
+ createJSInteropWrapper(
+ FakeMediaError(MediaError.MEDIA_ERR_DECODE)) as MediaError,
).toString(),
equals('mediaErrorDecode'),
);
@@ -160,7 +164,9 @@ void main() {
(WidgetTester tester) async {
expect(
CameraErrorCode.fromMediaError(
- FakeMediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED),
+ createJSInteropWrapper(
+ FakeMediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED))
+ as MediaError,
).toString(),
equals('mediaErrorSourceNotSupported'),
);
@@ -169,7 +175,7 @@ void main() {
testWidgets('with unknown error code', (WidgetTester tester) async {
expect(
CameraErrorCode.fromMediaError(
- FakeMediaError(5),
+ createJSInteropWrapper(FakeMediaError(5)) as MediaError,
).toString(),
equals('mediaErrorUnknown'),
);
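The same wrapping applies to one-off value fakes: `FakeMediaError` (defined in `helpers/helpers.dart`, not shown here) previously implemented `dart:html`'s `MediaError` directly, whereas now it only needs to expose a `code` member and be wrapped. A sketch of the assumed helper shape:

```dart
import 'dart:js_interop';

import 'package:web/web.dart';

// Assumed shape of the FakeMediaError helper; the real one is not part of
// this diff and may differ.
@JSExport()
class FakeMediaError {
  FakeMediaError(this.code);

  final int code;
}

// Wrapping produces an object that satisfies web.MediaError as far as
// CameraErrorCode.fromMediaError is concerned.
MediaError fakeMediaError(int code) =>
    createJSInteropWrapper(FakeMediaError(code)) as MediaError;
```

Used as `fakeMediaError(MediaError.MEDIA_ERR_ABORTED)`, this is equivalent to the inline wrapping shown in the hunks above.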
diff --git a/packages/camera/camera_web/example/integration_test/camera_options_test.dart b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
index 7dd25e37556..66f4a6bd1eb 100644
--- a/packages/camera/camera_web/example/integration_test/camera_options_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
@@ -3,6 +3,8 @@
// found in the LICENSE file.
// ignore: implementation_imports
+import 'dart:js_interop';
+
import 'package:camera_web/src/types/types.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:integration_test/integration_test.dart';
@@ -20,10 +22,10 @@ void main() {
);
expect(
- cameraOptions.toJson(),
+ cameraOptions.toMediaStreamConstraints().dartify(),
equals({
- 'audio': cameraOptions.audio.toJson(),
- 'video': cameraOptions.video.toJson(),
+ 'audio': cameraOptions.audio.toMediaStreamConstraints().dartify()!,
+ 'video': cameraOptions.video.toMediaStreamConstraints().dartify()!,
}),
);
});
@@ -61,8 +63,8 @@ void main() {
group('AudioConstraints', () {
testWidgets('serializes correctly', (WidgetTester tester) async {
expect(
- const AudioConstraints(enabled: true).toJson(),
- equals(true),
+ const AudioConstraints(enabled: true).toMediaStreamConstraints(),
+ true.toJS,
);
});
@@ -84,7 +86,7 @@ void main() {
);
expect(
- videoConstraints.toJson(),
+ videoConstraints.toMediaStreamConstraints().dartify(),
equals({
'facingMode': videoConstraints.facingMode!.toJson(),
'width': videoConstraints.width!.toJson(),
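The assertions above no longer compare `toJson()` maps; they convert the JS `MediaStreamConstraints` back into Dart collections with `dartify()` so the existing `equals` matchers still work. An illustrative round-trip, not taken from this diff:

```dart
import 'dart:js_interop';

void main() {
  // jsify() turns Dart collections into a JS value; dartify() converts a JS
  // value back into plain Dart objects (Map, List, num, String, bool, null).
  final JSAny? constraints = <String, Object?>{
    'facingMode': 'user',
    'width': <String, Object?>{'ideal': 1920},
  }.jsify();

  final Object? asDart = constraints.dartify();
  print(asDart); // {facingMode: user, width: {ideal: 1920}}
}
```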
diff --git a/packages/camera/camera_web/example/integration_test/camera_service_test.dart b/packages/camera/camera_web/example/integration_test/camera_service_test.dart
index fb2279a0942..2ed0c54e633 100644
--- a/packages/camera/camera_web/example/integration_test/camera_service_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_service_test.dart
@@ -2,8 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-import 'dart:html';
-import 'dart:js_util' as js_util;
+// ignore_for_file: only_throw_errors
+
+import 'dart:js_interop';
+import 'dart:js_interop_unsafe';
import 'package:camera_platform_interface/camera_platform_interface.dart';
// ignore_for_file: implementation_imports
@@ -15,6 +17,7 @@ import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:integration_test/integration_test.dart';
import 'package:mocktail/mocktail.dart';
+import 'package:web/web.dart' as web;
import 'helpers/helpers.dart';
@@ -24,27 +27,40 @@ void main() {
group('CameraService', () {
const int cameraId = 1;
- late Window window;
- late Navigator navigator;
- late MediaDevices mediaDevices;
- late CameraService cameraService;
+ late MockWindow mockWindow;
+ late MockNavigator mockNavigator;
+ late MockMediaDevices mockMediaDevices;
+
+ late web.Window window;
+ late web.Navigator navigator;
+ late web.MediaDevices mediaDevices;
+
late JsUtil jsUtil;
+ late CameraService cameraService;
+
setUp(() async {
- window = MockWindow();
- navigator = MockNavigator();
- mediaDevices = MockMediaDevices();
+ mockWindow = MockWindow();
+ mockNavigator = MockNavigator();
+ mockMediaDevices = MockMediaDevices();
+
+ window = createJSInteropWrapper(mockWindow) as web.Window;
+ navigator = createJSInteropWrapper(mockNavigator) as web.Navigator;
+ mediaDevices =
+ createJSInteropWrapper(mockMediaDevices) as web.MediaDevices;
+
+ mockWindow.navigator = navigator;
+ mockNavigator.mediaDevices = mediaDevices;
+
jsUtil = MockJsUtil();
- when(() => window.navigator).thenReturn(navigator);
- when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+ registerFallbackValue(createJSInteropWrapper(MockWindow()));
// Mock JsUtil to return the real getProperty from dart:js_util.
when(() => jsUtil.getProperty(any(), any())).thenAnswer(
- (Invocation invocation) => js_util.getProperty(
- invocation.positionalArguments[0] as Object,
- invocation.positionalArguments[1] as Object,
- ),
+ (Invocation invocation) =>
+ (invocation.positionalArguments[0] as JSObject)
+ .getProperty(invocation.positionalArguments[1] as JSAny),
);
cameraService = CameraService()..window = window;
@@ -54,8 +70,15 @@ void main() {
testWidgets(
'calls MediaDevices.getUserMedia '
'with provided options', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenAnswer((_) async => FakeMediaStream(<MediaStreamTrack>[]));
+ late final web.MediaStreamConstraints? capturedConstraints;
+ mockMediaDevices.getUserMedia =
+ ([web.MediaStreamConstraints? constraints]) {
+ capturedConstraints = constraints;
+ final web.MediaStream stream =
+ createJSInteropWrapper(FakeMediaStream(<web.MediaStreamTrack>[]))
+ as web.MediaStream;
+ return Future.value(stream).toJS;
+ }.toJS;
final CameraOptions options = CameraOptions(
video: VideoConstraints(
@@ -66,26 +89,13 @@ void main() {
await cameraService.getMediaStreamForOptions(options);
- verify(
- () => mediaDevices.getUserMedia(options.toJson()),
- ).called(1);
- });
-
- testWidgets(
- 'throws PlatformException '
- 'with notSupported error '
- 'when there are no media devices', (WidgetTester tester) async {
- when(() => navigator.mediaDevices).thenReturn(null);
-
expect(
- () => cameraService.getMediaStreamForOptions(const CameraOptions()),
- throwsA(
- isA<PlatformException>().having(
- (PlatformException e) => e.code,
- 'code',
- CameraErrorCode.notSupported.toString(),
- ),
- ),
+ capturedConstraints?.video.dartify(),
+ equals(options.video.toMediaStreamConstraints().dartify()),
+ );
+ expect(
+ capturedConstraints?.audio.dartify(),
+ equals(options.audio.toMediaStreamConstraints().dartify()),
);
});
@@ -94,8 +104,11 @@ void main() {
'with notFound error '
'when MediaDevices.getUserMedia throws DomException '
'with NotFoundError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('NotFoundError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'NotFoundError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -116,8 +129,11 @@ void main() {
'with notFound error '
'when MediaDevices.getUserMedia throws DomException '
'with DevicesNotFoundError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('DevicesNotFoundError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'DevicesNotFoundError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -138,9 +154,11 @@ void main() {
'with notReadable error '
'when MediaDevices.getUserMedia throws DomException '
'with NotReadableError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('NotReadableError'));
-
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'NotReadableError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
const CameraOptions(),
@@ -160,8 +178,11 @@ void main() {
'with notReadable error '
'when MediaDevices.getUserMedia throws DomException '
'with TrackStartError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('TrackStartError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'TrackStartError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -182,8 +203,11 @@ void main() {
'with overconstrained error '
'when MediaDevices.getUserMedia throws DomException '
'with OverconstrainedError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('OverconstrainedError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'OverconstrainedError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -204,8 +228,11 @@ void main() {
'with overconstrained error '
'when MediaDevices.getUserMedia throws DomException '
'with ConstraintNotSatisfiedError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('ConstraintNotSatisfiedError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'ConstraintNotSatisfiedError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -226,8 +253,11 @@ void main() {
'with permissionDenied error '
'when MediaDevices.getUserMedia throws DomException '
'with NotAllowedError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('NotAllowedError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'NotAllowedError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -248,8 +278,11 @@ void main() {
'with permissionDenied error '
'when MediaDevices.getUserMedia throws DomException '
'with PermissionDeniedError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('PermissionDeniedError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'PermissionDeniedError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -270,8 +303,11 @@ void main() {
'with type error '
'when MediaDevices.getUserMedia throws DomException '
'with TypeError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('TypeError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'TypeError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -292,8 +328,11 @@ void main() {
'with abort error '
'when MediaDevices.getUserMedia throws DomException '
'with AbortError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('AbortError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'AbortError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -314,8 +353,11 @@ void main() {
'with security error '
'when MediaDevices.getUserMedia throws DomException '
'with SecurityError', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('SecurityError'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'SecurityError');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -336,8 +378,11 @@ void main() {
'with unknown error '
'when MediaDevices.getUserMedia throws DomException '
'with an unknown error', (WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any()))
- .thenThrow(FakeDomException('Unknown'));
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw web.DOMException('', 'Unknown');
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -358,7 +403,11 @@ void main() {
'with unknown error '
'when MediaDevices.getUserMedia throws an unknown exception',
(WidgetTester tester) async {
- when(() => mediaDevices.getUserMedia(any())).thenThrow(Exception());
+ mockMediaDevices.getUserMedia = ([web.MediaStreamConstraints? _]) {
+ throw Exception();
+ // ignore: dead_code
+ return Future.value(web.MediaStream()).toJS;
+ }.toJS;
expect(
() => cameraService.getMediaStreamForOptions(
@@ -379,17 +428,23 @@ void main() {
group('getZoomLevelCapabilityForCamera', () {
late Camera camera;
- late List<MediaStreamTrack> videoTracks;
+ late MockMediaStreamTrack mockVideoTrack;
+ late List<web.MediaStreamTrack> videoTracks;
setUp(() {
camera = MockCamera();
- videoTracks = <MediaStreamTrack>[
- MockMediaStreamTrack(),
- MockMediaStreamTrack()
+ mockVideoTrack = MockMediaStreamTrack();
+ videoTracks = <web.MediaStreamTrack>[
+ createJSInteropWrapper(mockVideoTrack) as web.MediaStreamTrack,
+ createJSInteropWrapper(MockMediaStreamTrack())
+ as web.MediaStreamTrack,
];
when(() => camera.textureId).thenReturn(0);
- when(() => camera.stream).thenReturn(FakeMediaStream(videoTracks));
+ when(() => camera.stream).thenReturn(
+ createJSInteropWrapper(FakeMediaStream(videoTracks))
+ as web.MediaStream,
+ );
cameraService.jsUtil = jsUtil;
});
@@ -397,18 +452,15 @@ void main() {
testWidgets(
'returns the zoom level capability '
'based on the first video track', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'zoom': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return web.MediaTrackSupportedConstraints(zoom: true);
+ }.toJS;
- when(videoTracks.first.getCapabilities).thenReturn({
- 'zoom': js_util.jsify({
- 'min': 100,
- 'max': 400,
- 'step': 2,
- }),
- });
+ mockVideoTrack.getCapabilities = () {
+ return web.MediaTrackCapabilities(
+ zoom: web.MediaSettingsRange(min: 100, max: 400, step: 2),
+ );
+ }.toJS;
final ZoomLevelCapability zoomLevelCapability =
cameraService.getZoomLevelCapabilityForCamera(camera);
@@ -419,75 +471,19 @@ void main() {
});
group('throws CameraWebException', () {
- testWidgets(
- 'with zoomLevelNotSupported error '
- 'when there are no media devices', (WidgetTester tester) async {
- when(() => navigator.mediaDevices).thenReturn(null);
-
- expect(
- () => cameraService.getZoomLevelCapabilityForCamera(camera),
- throwsA(
- isA<CameraWebException>()
- .having(
- (CameraWebException e) => e.cameraId,
- 'cameraId',
- camera.textureId,
- )
- .having(
- (CameraWebException e) => e.code,
- 'code',
- CameraErrorCode.zoomLevelNotSupported,
- ),
- ),
- );
- });
-
testWidgets(
'with zoomLevelNotSupported error '
'when the zoom level is not supported '
'in the browser', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'zoom': false,
- });
-
- when(videoTracks.first.getCapabilities).thenReturn({
- 'zoom': {
- 'min': 100,
- 'max': 400,
- 'step': 2,
- },
- });
-
- expect(
- () => cameraService.getZoomLevelCapabilityForCamera(camera),
- throwsA(
- isA<CameraWebException>()
- .having(
- (CameraWebException e) => e.cameraId,
- 'cameraId',
- camera.textureId,
- )
- .having(
- (CameraWebException e) => e.code,
- 'code',
- CameraErrorCode.zoomLevelNotSupported,
- ),
- ),
- );
- });
-
- testWidgets(
- 'with zoomLevelNotSupported error '
- 'when the zoom level is not supported '
- 'by the camera', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'zoom': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return web.MediaTrackSupportedConstraints(zoom: false);
+ }.toJS;
- when(videoTracks.first.getCapabilities)
- .thenReturn({});
+ mockVideoTrack.getCapabilities = () {
+ return web.MediaTrackCapabilities(
+ zoom: web.MediaSettingsRange(min: 100, max: 400, step: 2),
+ );
+ }.toJS;
expect(
() => cameraService.getZoomLevelCapabilityForCamera(camera),
@@ -511,14 +507,15 @@ void main() {
'with notStarted error '
'when the camera stream has not been initialized',
(WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'zoom': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return web.MediaTrackSupportedConstraints(zoom: true);
+ }.toJS;
// Create a camera stream with no video tracks.
- when(() => camera.stream)
- .thenReturn(FakeMediaStream(<MediaStreamTrack>[]));
+ when(() => camera.stream).thenReturn(
+ createJSInteropWrapper(FakeMediaStream(<web.MediaStreamTrack>[]))
+ as web.MediaStream,
+ );
expect(
() => cameraService.getZoomLevelCapabilityForCamera(camera),
@@ -545,59 +542,44 @@ void main() {
cameraService.jsUtil = jsUtil;
});
- testWidgets(
- 'throws PlatformException '
- 'with notSupported error '
- 'when there are no media devices', (WidgetTester tester) async {
- when(() => navigator.mediaDevices).thenReturn(null);
-
- expect(
- () =>
- cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack()),
- throwsA(
- isA<PlatformException>().having(
- (PlatformException e) => e.code,
- 'code',
- CameraErrorCode.notSupported.toString(),
- ),
- ),
- );
- });
-
testWidgets(
'returns null '
'when the facing mode is not supported', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'facingMode': false,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return web.MediaTrackSupportedConstraints(facingMode: false);
+ }.toJS;
- final String? facingMode =
- cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack());
+ final String? facingMode = cameraService.getFacingModeForVideoTrack(
+ createJSInteropWrapper(MockMediaStreamTrack())
+ as web.MediaStreamTrack,
+ );
expect(facingMode, isNull);
});
group('when the facing mode is supported', () {
- late MediaStreamTrack videoTrack;
+ late MockMediaStreamTrack mockVideoTrack;
+ late web.MediaStreamTrack videoTrack;
setUp(() {
- videoTrack = MockMediaStreamTrack();
+ mockVideoTrack = MockMediaStreamTrack();
+ videoTrack =
+ createJSInteropWrapper(mockVideoTrack) as web.MediaStreamTrack;
- when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'.toJS))
.thenReturn(true);
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'facingMode': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return web.MediaTrackSupportedConstraints(facingMode: true);
+ }.toJS;
});
testWidgets(
'returns an appropriate facing mode '
'based on the video track settings', (WidgetTester tester) async {
- when(videoTrack.getSettings)
- .thenReturn({'facingMode': 'user'});
+ mockVideoTrack.getSettings = () {
+ return web.MediaTrackSettings(facingMode: 'user');
+ }.toJS;
final String? facingMode =
cameraService.getFacingModeForVideoTrack(videoTrack);
@@ -610,12 +592,16 @@ void main() {
'based on the video track capabilities '
'when the facing mode setting is empty',
(WidgetTester tester) async {
- when(videoTrack.getSettings).thenReturn({});
- when(videoTrack.getCapabilities).thenReturn({
- 'facingMode': ['environment', 'left']
- });
-
- when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ mockVideoTrack.getSettings = () {
+ return web.MediaTrackSettings(facingMode: '');
+ }.toJS;
+ mockVideoTrack.getCapabilities = () {
+ return web.MediaTrackCapabilities(
+ facingMode: ['environment'.toJS, 'left'.toJS].toJS,
+ );
+ }.toJS;
+
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'.toJS))
.thenReturn(true);
final String? facingMode =
@@ -628,9 +614,12 @@ void main() {
'returns null '
'when the facing mode setting '
'and capabilities are empty', (WidgetTester tester) async {
- when(videoTrack.getSettings).thenReturn({});
- when(videoTrack.getCapabilities)
- .thenReturn({'facingMode': []});
+ mockVideoTrack.getSettings = () {
+ return web.MediaTrackSettings(facingMode: '');
+ }.toJS;
+ mockVideoTrack.getCapabilities = () {
+ return web.MediaTrackCapabilities(facingMode: <JSString>[].toJS);
+ }.toJS;
final String? facingMode =
cameraService.getFacingModeForVideoTrack(videoTrack);
@@ -643,9 +632,11 @@ void main() {
'when the facing mode setting is empty and '
'the video track capabilities are not supported',
(WidgetTester tester) async {
- when(videoTrack.getSettings).thenReturn({});
+ mockVideoTrack.getSettings = () {
+ return web.MediaTrackSettings(facingMode: '');
+ }.toJS;
- when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'.toJS))
.thenReturn(false);
final String? facingMode =
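Throughout the `CameraService` tests above, mocktail's `when(...).thenThrow(...)` is replaced by assigning a `.toJS` closure to the exported mock field, because the wrapped `MediaDevices` is a real JS object that mocktail cannot intercept. A sketch, assuming the `MockMediaDevices` helper exposes `getUserMedia` as an assignable `JSFunction` field (the real helper lives in `helpers/helpers.dart` and is not shown in this diff):

```dart
import 'dart:js_interop';

import 'package:web/web.dart' as web;

// Assumed shape of the MockMediaDevices helper; the real one may differ.
@JSExport()
class MockMediaDevices {
  JSFunction? getUserMedia;
}

/// Makes every getUserMedia call reject with a DOMException of [errorName],
/// mirroring the per-test stubs above.
void stubGetUserMediaError(MockMediaDevices mock, String errorName) {
  mock.getUserMedia = ([web.MediaStreamConstraints? _]) {
    throw web.DOMException('', errorName);
    // ignore: dead_code
    return Future<web.MediaStream>.value(web.MediaStream()).toJS;
  }.toJS;
}
```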
diff --git a/packages/camera/camera_web/example/integration_test/camera_test.dart b/packages/camera/camera_web/example/integration_test/camera_test.dart
index 7611e288b84..e953a06b0e8 100644
--- a/packages/camera/camera_web/example/integration_test/camera_test.dart
+++ b/packages/camera/camera_web/example/integration_test/camera_test.dart
@@ -3,7 +3,8 @@
// found in the LICENSE file.
import 'dart:async';
-import 'dart:html';
+import 'dart:js_interop';
+import 'dart:js_interop_unsafe';
import 'dart:ui';
import 'package:async/async.dart';
@@ -15,6 +16,7 @@ import 'package:camera_web/src/types/types.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:integration_test/integration_test.dart';
import 'package:mocktail/mocktail.dart';
+import 'package:web/web.dart';
import 'helpers/helpers.dart';
@@ -24,6 +26,10 @@ void main() {
group('Camera', () {
const int textureId = 1;
+ late MockWindow mockWindow;
+ late MockNavigator mockNavigator;
+ late MockMediaDevices mockMediaDevices;
+
late Window window;
late Navigator navigator;
late MediaDevices mediaDevices;
@@ -32,16 +38,20 @@ void main() {
late CameraService cameraService;
setUp(() {
- window = MockWindow();
- navigator = MockNavigator();
- mediaDevices = MockMediaDevices();
+ mockWindow = MockWindow();
+ mockNavigator = MockNavigator();
+ mockMediaDevices = MockMediaDevices();
+
+ window = createJSInteropWrapper(mockWindow) as Window;
+ navigator = createJSInteropWrapper(mockNavigator) as Navigator;
+ mediaDevices = createJSInteropWrapper(mockMediaDevices) as MediaDevices;
- when(() => window.navigator).thenReturn(navigator);
- when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+ mockWindow.navigator = navigator;
+ mockNavigator.mediaDevices = mediaDevices;
cameraService = MockCameraService();
- final VideoElement videoElement =
+ final HTMLVideoElement videoElement =
getVideoElementWithBlankStream(const Size(10, 10));
mediaStream = videoElement.captureStream();
@@ -110,7 +120,8 @@ void main() {
expect(camera.videoElement.autoplay, isFalse);
expect(camera.videoElement.muted, isTrue);
expect(camera.videoElement.srcObject, mediaStream);
- expect(camera.videoElement.attributes.keys, contains('playsinline'));
+ expect(camera.videoElement.attributes.getNamedItem('playsinline'),
+ isNotNull);
expect(
camera.videoElement.style.transformOrigin, equals('center center'));
@@ -154,7 +165,10 @@ void main() {
expect(camera.divElement, isNotNull);
expect(camera.divElement.style.objectFit, equals('cover'));
- expect(camera.divElement.children, contains(camera.videoElement));
+ final JSArray? array = (globalContext['Array']! as JSObject)
+ .callMethod('from'.toJS, camera.divElement.children)
+ as JSArray?;
+ expect(array?.toDart, contains(camera.videoElement));
});
testWidgets('initializes the camera stream', (WidgetTester tester) async {
@@ -303,26 +317,26 @@ void main() {
group(
'enables the torch mode '
'when taking a picture', () {
+ late MockMediaStreamTrack mockVideoTrack;
late List<MediaStreamTrack> videoTracks;
late MediaStream videoStream;
- late VideoElement videoElement;
+ late HTMLVideoElement videoElement;
setUp(() {
+ mockVideoTrack = MockMediaStreamTrack();
videoTracks = <MediaStreamTrack>[
- MockMediaStreamTrack(),
- MockMediaStreamTrack()
+ createJSInteropWrapper(mockVideoTrack) as MediaStreamTrack,
+ createJSInteropWrapper(MockMediaStreamTrack()) as MediaStreamTrack,
];
- videoStream = FakeMediaStream(videoTracks);
+ videoStream = createJSInteropWrapper(FakeMediaStream(videoTracks))
+ as MediaStream;
videoElement = getVideoElementWithBlankStream(const Size(100, 100))
..muted = true;
- when(() => videoTracks.first.applyConstraints(any()))
- .thenAnswer((_) async => {});
-
- when(videoTracks.first.getCapabilities).thenReturn({
- 'torch': true,
- });
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities(torch: [true.toJS].toJS);
+ }.toJS;
});
testWidgets('if the flash mode is auto', (WidgetTester tester) async {
@@ -337,27 +351,22 @@ void main() {
await camera.play();
+ final List<MediaTrackConstraints> capturedConstraints =
+ <MediaTrackConstraints>[];
+ mockVideoTrack.applyConstraints = ([
+ MediaTrackConstraints? constraints,
+ ]) {
+ if (constraints != null) {
+ capturedConstraints.add(constraints);
+ }
+ return Future.value().toJS;
+ }.toJS;
+
final XFile _ = await camera.takePicture();
- verify(
- () => videoTracks.first.applyConstraints({
- 'advanced': [
- {
- 'torch': true,
- }
- ]
- }),
- ).called(1);
-
- verify(
- () => videoTracks.first.applyConstraints({
- 'advanced': [
- {
- 'torch': false,
- }
- ]
- }),
- ).called(1);
+ expect(capturedConstraints.length, 2);
+ expect(capturedConstraints[0].torch.dartify(), true);
+ expect(capturedConstraints[1].torch.dartify(), false);
});
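The `verify(...).called(1)` checks on `applyConstraints` disappear for the same reason: the track handed to the camera is a JS wrapper, so the stub itself records what it receives and the test asserts on the captured constraints, as in the torch tests above. A sketch, assuming `MockMediaStreamTrack` exposes `applyConstraints` as an assignable field (the real helper is not shown in this diff):

```dart
import 'dart:js_interop';

import 'package:web/web.dart';

// Assumed shape of the MockMediaStreamTrack helper; the real one may differ.
@JSExport()
class MockMediaStreamTrack {
  JSFunction? applyConstraints;
}

/// Stand-in for mocktail's verify(): every MediaTrackConstraints passed to
/// applyConstraints is recorded so the test can check order and contents.
List<MediaTrackConstraints> captureAppliedConstraints(
    MockMediaStreamTrack mock) {
  final List<MediaTrackConstraints> captured = <MediaTrackConstraints>[];
  mock.applyConstraints = ([MediaTrackConstraints? constraints]) {
    if (constraints != null) {
      captured.add(constraints);
    }
    return Future<JSAny?>.value().toJS;
  }.toJS;
  return captured;
}
```

The assertions then read, for example, `expect(captured[0].torch.dartify(), true)`, matching the expectations in the hunk above.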
testWidgets('if the flash mode is always', (WidgetTester tester) async {
@@ -372,27 +381,22 @@ void main() {
await camera.play();
+ final List<MediaTrackConstraints> capturedConstraints =
+ <MediaTrackConstraints>[];
+ mockVideoTrack.applyConstraints = ([
+ MediaTrackConstraints? constraints,
+ ]) {
+ if (constraints != null) {
+ capturedConstraints.add(constraints);
+ }
+ return Future.value().toJS;
+ }.toJS;
+
final XFile _ = await camera.takePicture();
- verify(
- () => videoTracks.first.applyConstraints({
- 'advanced': [
- {
- 'torch': true,
- }
- ]
- }),
- ).called(1);
-
- verify(
- () => videoTracks.first.applyConstraints({
- 'advanced': [
- {
- 'torch': false,
- }
- ]
- }),
- ).called(1);
+ expect(capturedConstraints.length, 2);
+ expect(capturedConstraints[0].torch.dartify(), true);
+ expect(capturedConstraints[1].torch.dartify(), false);
});
});
});
@@ -404,7 +408,7 @@ void main() {
(WidgetTester tester) async {
const Size videoSize = Size(1280, 720);
- final VideoElement videoElement =
+ final HTMLVideoElement videoElement =
getVideoElementWithBlankStream(videoSize);
mediaStream = videoElement.captureStream();
@@ -425,7 +429,7 @@ void main() {
'returns Size.zero '
'if the camera is missing video tracks', (WidgetTester tester) async {
// Create a video stream with no video tracks.
- final VideoElement videoElement = VideoElement();
+ final HTMLVideoElement videoElement = HTMLVideoElement();
mediaStream = videoElement.captureStream();
final Camera camera = Camera(
@@ -443,32 +447,38 @@ void main() {
});
group('setFlashMode', () {
+ late MockMediaStreamTrack mockVideoTrack;
late List<MediaStreamTrack> videoTracks;
late MediaStream videoStream;
setUp(() {
+ mockVideoTrack = MockMediaStreamTrack();
videoTracks = <MediaStreamTrack>[
- MockMediaStreamTrack(),
- MockMediaStreamTrack()
+ createJSInteropWrapper(mockVideoTrack) as MediaStreamTrack,
+ createJSInteropWrapper(MockMediaStreamTrack()) as MediaStreamTrack,
];
- videoStream = FakeMediaStream(videoTracks);
-
- when(() => videoTracks.first.applyConstraints(any()))
- .thenAnswer((_) async => {});
-
- when(videoTracks.first.getCapabilities)
- .thenReturn({});
+ videoStream =
+ createJSInteropWrapper(FakeMediaStream(videoTracks)) as MediaStream;
+
+ mockVideoTrack.applyConstraints = ([
+ MediaTrackConstraints? constraints,
+ ]) {
+ return Future.value().toJS;
+ }.toJS;
+
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities();
+ }.toJS;
});
testWidgets('sets the camera flash mode', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'torch': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return MediaTrackSupportedConstraints(torch: true);
+ }.toJS;
- when(videoTracks.first.getCapabilities).thenReturn({
- 'torch': true,
- });
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities(torch: [true.toJS].toJS);
+ }.toJS;
final Camera camera = Camera(
textureId: textureId,
@@ -490,14 +500,13 @@ void main() {
testWidgets(
'enables the torch mode '
'if the flash mode is torch', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'torch': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return MediaTrackSupportedConstraints(torch: true);
+ }.toJS;
- when(videoTracks.first.getCapabilities).thenReturn({
- 'torch': true,
- });
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities(torch: [true.toJS].toJS);
+ }.toJS;
final Camera camera = Camera(
textureId: textureId,
@@ -506,30 +515,33 @@ void main() {
..window = window
..stream = videoStream;
+ final List<MediaTrackConstraints> capturedConstraints =
+ <MediaTrackConstraints>[];
+ mockVideoTrack.applyConstraints = ([
+ MediaTrackConstraints? constraints,
+ ]) {
+ if (constraints != null) {
+ capturedConstraints.add(constraints);
+ }
+ return Future.value().toJS;
+ }.toJS;
+
camera.setFlashMode(FlashMode.torch);
- verify(
- () => videoTracks.first.applyConstraints({
- 'advanced': [
- {
- 'torch': true,
- }
- ]
- }),
- ).called(1);
+ expect(capturedConstraints.length, 1);
+ expect(capturedConstraints[0].torch.dartify(), true);
});
testWidgets(
'disables the torch mode '
'if the flash mode is not torch', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'torch': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return MediaTrackSupportedConstraints(torch: true);
+ }.toJS;
- when(videoTracks.first.getCapabilities).thenReturn({
- 'torch': true,
- });
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities(torch: [true.toJS].toJS);
+ }.toJS;
final Camera camera = Camera(
textureId: textureId,
@@ -538,62 +550,35 @@ void main() {
..window = window
..stream = videoStream;
+ final List<MediaTrackConstraints> capturedConstraints =
+ <MediaTrackConstraints>[];
+ mockVideoTrack.applyConstraints = ([
+ MediaTrackConstraints? constraints,
+ ]) {
+ if (constraints != null) {
+ capturedConstraints.add(constraints);
+ }
+ return Future.value().toJS;
+ }.toJS;
+
camera.setFlashMode(FlashMode.auto);
- verify(
- () => videoTracks.first.applyConstraints({
- 'advanced': [
- {
- 'torch': false,
- }
- ]
- }),
- ).called(1);
+ expect(capturedConstraints.length, 1);
+ expect(capturedConstraints[0].torch.dartify(), false);
});
group('throws a CameraWebException', () {
- testWidgets(
- 'with torchModeNotSupported error '
- 'when there are no media devices', (WidgetTester tester) async {
- when(() => navigator.mediaDevices).thenReturn(null);
-
- final Camera camera = Camera(
- textureId: textureId,
- cameraService: cameraService,
- )
- ..window = window
- ..stream = videoStream;
-
- expect(
- () => camera.setFlashMode(FlashMode.always),
- throwsA(
- isA<CameraWebException>()
- .having(
- (CameraWebException e) => e.cameraId,
- 'cameraId',
- textureId,
- )
- .having(
- (CameraWebException e) => e.code,
- 'code',
- CameraErrorCode.torchModeNotSupported,
- ),
- ),
- );
- });
-
testWidgets(
'with torchModeNotSupported error '
'when the torch mode is not supported '
'in the browser', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'torch': false,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return MediaTrackSupportedConstraints(torch: false);
+ }.toJS;
- when(videoTracks.first.getCapabilities).thenReturn({
- 'torch': true,
- });
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities(torch: [true.toJS].toJS);
+ }.toJS;
final Camera camera = Camera(
textureId: textureId,
@@ -624,14 +609,13 @@ void main() {
'with torchModeNotSupported error '
'when the torch mode is not supported '
'by the camera', (WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'torch': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return MediaTrackSupportedConstraints(torch: true);
+ }.toJS;
- when(videoTracks.first.getCapabilities).thenReturn({
- 'torch': false,
- });
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities(torch: [false.toJS].toJS);
+ }.toJS;
final Camera camera = Camera(
textureId: textureId,
@@ -662,14 +646,13 @@ void main() {
'with notStarted error '
'when the camera stream has not been initialized',
(WidgetTester tester) async {
- when(mediaDevices.getSupportedConstraints)
- .thenReturn({
- 'torch': true,
- });
+ mockMediaDevices.getSupportedConstraints = () {
+ return MediaTrackSupportedConstraints(torch: true);
+ }.toJS;
- when(videoTracks.first.getCapabilities).thenReturn({
- 'torch': true,
- });
+ mockVideoTrack.getCapabilities = () {
+ return MediaTrackCapabilities(torch: [true.toJS].toJS);
+ }.toJS;
final Camera camera = Camera(
textureId: textureId,
@@ -710,7 +693,8 @@ void main() {
final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
minimum: 50.0,
maximum: 100.0,
- videoTrack: MockMediaStreamTrack(),
+ videoTrack: createJSInteropWrapper(MockMediaStreamTrack())
+ as MediaStreamTrack,
);
when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
@@ -741,7 +725,8 @@ void main() {
final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
minimum: 50.0,
maximum: 100.0,
- videoTrack: MockMediaStreamTrack(),
+ videoTrack: createJSInteropWrapper(MockMediaStreamTrack())
+ as MediaStreamTrack,
);
when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
@@ -769,7 +754,9 @@ void main() {
cameraService: cameraService,
);
- final MockMediaStreamTrack videoTrack = MockMediaStreamTrack();
+ final MockMediaStreamTrack mockVideoTrack = MockMediaStreamTrack();
+ final MediaStreamTrack videoTrack =
+ createJSInteropWrapper(mockVideoTrack) as MediaStreamTrack;
final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
minimum: 50.0,
@@ -777,8 +764,16 @@ void main() {
videoTrack: videoTrack,
);
- when(() => videoTrack.applyConstraints(any()))
- .thenAnswer((_) async {});
+ final List<MediaTrackConstraints> capturedConstraints =
+ <MediaTrackConstraints>[];
+ mockVideoTrack.applyConstraints = ([
+ MediaTrackConstraints? constraints,
+ ]) {
+ if (constraints != null) {
+ capturedConstraints.add(constraints);
+ }
+ return Future.value().toJS;
+ }.toJS;
when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
.thenReturn(zoomLevelCapability);
@@ -787,15 +782,8 @@ void main() {
camera.setZoomLevel(zoom);
- verify(
- () => videoTrack.applyConstraints({
- 'advanced': [
- {
- ZoomLevelCapability.constraintName: zoom,
- }
- ]
- }),
- ).called(1);
+ expect(capturedConstraints.length, 1);
+ expect(capturedConstraints[0].zoom.dartify(), zoom);
});
group('throws a CameraWebException', () {
@@ -811,7 +799,8 @@ void main() {
final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
minimum: 50.0,
maximum: 100.0,
- videoTrack: MockMediaStreamTrack(),
+ videoTrack: createJSInteropWrapper(MockMediaStreamTrack())
+ as MediaStreamTrack,
);
when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
@@ -846,7 +835,8 @@ void main() {
final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
minimum: 50.0,
maximum: 100.0,
- videoTrack: MockMediaStreamTrack(),
+ videoTrack: createJSInteropWrapper(MockMediaStreamTrack())
+ as MediaStreamTrack,
);
when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
@@ -878,7 +868,9 @@ void main() {
'returns a lens direction '
'based on the first video track settings',
(WidgetTester tester) async {
- final MockVideoElement videoElement = MockVideoElement();
+ final MockVideoElement mockVideoElement = MockVideoElement();
+ final HTMLVideoElement videoElement =
+ createJSInteropWrapper(mockVideoElement) as HTMLVideoElement;
final Camera camera = Camera(
textureId: textureId,
@@ -887,15 +879,19 @@ void main() {
final MockMediaStreamTrack firstVideoTrack = MockMediaStreamTrack();
- when(() => videoElement.srcObject).thenReturn(
- FakeMediaStream([
- firstVideoTrack,
- MockMediaStreamTrack(),
- ]),
- );
+ mockVideoElement.srcObject = createJSInteropWrapper(
+ FakeMediaStream(
+ [
+ createJSInteropWrapper(firstVideoTrack) as MediaStreamTrack,
+ createJSInteropWrapper(MockMediaStreamTrack())
+ as MediaStreamTrack,
+ ],
+ ),
+ ) as MediaStream;
- when(firstVideoTrack.getSettings)
- .thenReturn({'facingMode': 'environment'});
+ firstVideoTrack.getSettings = () {
+ return MediaTrackSettings(facingMode: 'environment');
+ }.toJS;
when(() => cameraService.mapFacingModeToLensDirection('environment'))
.thenReturn(CameraLensDirection.external);
@@ -910,7 +906,9 @@ void main() {
'returns null '
'if the first video track is missing the facing mode',
(WidgetTester tester) async {
- final MockVideoElement videoElement = MockVideoElement();
+ final MockVideoElement mockVideoElement = MockVideoElement();
+ final HTMLVideoElement videoElement =
+ createJSInteropWrapper(mockVideoElement) as HTMLVideoElement;
final Camera camera = Camera(
textureId: textureId,
@@ -919,14 +917,19 @@ void main() {
final MockMediaStreamTrack firstVideoTrack = MockMediaStreamTrack();
- when(() => videoElement.srcObject).thenReturn(
- FakeMediaStream([
- firstVideoTrack,
- MockMediaStreamTrack(),
- ]),
- );
+ videoElement.srcObject = createJSInteropWrapper(
+ FakeMediaStream(
+ [
+ createJSInteropWrapper(firstVideoTrack) as MediaStreamTrack,
+ createJSInteropWrapper(MockMediaStreamTrack())
+ as MediaStreamTrack,
+ ],
+ ),
+ ) as MediaStream;
- when(firstVideoTrack.getSettings).thenReturn({});
+ firstVideoTrack.getSettings = () {
+ return MediaTrackSettings();
+ }.toJS;
expect(
camera.getLensDirection(),
@@ -938,7 +941,7 @@ void main() {
'returns null '
'if the camera is missing video tracks', (WidgetTester tester) async {
// Create a video stream with no video tracks.
- final VideoElement videoElement = VideoElement();
+ final HTMLVideoElement videoElement = HTMLVideoElement();
mediaStream = videoElement.captureStream();
final Camera camera = Camera(
@@ -974,15 +977,15 @@ void main() {
group('video recording', () {
const String supportedVideoType = 'video/webm';
+ late MockMediaRecorder mockMediaRecorder;
late MediaRecorder mediaRecorder;
bool isVideoTypeSupported(String type) => type == supportedVideoType;
setUp(() {
- mediaRecorder = MockMediaRecorder();
-
- when(() => mediaRecorder.onError)
- .thenAnswer((_) => const Stream.empty());
+ mockMediaRecorder = MockMediaRecorder();
+ mediaRecorder =
+ createJSInteropWrapper(mockMediaRecorder) as MediaRecorder;
});
group('startVideoRecording', () {
@@ -1027,11 +1030,21 @@ void main() {
await camera.initialize();
await camera.play();
+ final List<String> capturedEvents = <String>[];
+ mockMediaRecorder.addEventListener = (
+ String type,
+ EventListener? callback, [
+ JSAny? options,
+ ]) {
+ capturedEvents.add(type);
+ }.toJS;
+
await camera.startVideoRecording();
- verify(
- () => mediaRecorder.addEventListener('dataavailable', any()),
- ).called(1);
+ expect(
+ capturedEvents.where((String e) => e == 'dataavailable').length,
+ 1,
+ );
});
testWidgets('listens to the media recorder stop events',
@@ -1046,11 +1059,21 @@ void main() {
await camera.initialize();
await camera.play();
+ final List<String> capturedEvents = <String>[];
+ mockMediaRecorder.addEventListener = (
+ String type,
+ EventListener? callback, [
+ JSAny? options,
+ ]) {
+ capturedEvents.add(type);
+ }.toJS;
+
await camera.startVideoRecording();
- verify(
- () => mediaRecorder.addEventListener('stop', any()),
- ).called(1);
+ expect(
+ capturedEvents.where((String e) => e == 'stop').length,
+ 1,
+ );
});
testWidgets('starts a video recording', (WidgetTester tester) async {
@@ -1064,9 +1087,14 @@ void main() {
await camera.initialize();
await camera.play();
+ final List<int?> capturedStarts = <int?>[];
+ mockMediaRecorder.start = ([int? timeslice]) {
+ capturedStarts.add(timeslice);
+ }.toJS;
+
await camera.startVideoRecording();
- verify(mediaRecorder.start).called(1);
+ expect(capturedStarts.length, 1);
});
group('throws a CameraWebException', () {
@@ -1108,9 +1136,14 @@ void main() {
cameraService: cameraService,
)..mediaRecorder = mediaRecorder;
+ int pauses = 0;
+ mockMediaRecorder.pause = () {
+ pauses++;
+ }.toJS;
+
await camera.pauseVideoRecording();
- verify(mediaRecorder.pause).called(1);
+ expect(pauses, 1);
});
testWidgets(
@@ -1149,9 +1182,14 @@ void main() {
cameraService: cameraService,
)..mediaRecorder = mediaRecorder;
+ int resumes = 0;
+ mockMediaRecorder.resume = () {
+ resumes++;
+ }.toJS;
+
await camera.resumeVideoRecording();
- verify(mediaRecorder.resume).called(1);
+ expect(resumes, 1);
});
testWidgets(
@@ -1198,50 +1236,62 @@ void main() {
await camera.initialize();
await camera.play();
- late void Function(Event) videoDataAvailableListener;
- late void Function(Event) videoRecordingStoppedListener;
-
- when(
- () => mediaRecorder.addEventListener('dataavailable', any()),
- ).thenAnswer((Invocation invocation) {
- videoDataAvailableListener =
- invocation.positionalArguments[1] as void Function(Event);
- });
-
- when(
- () => mediaRecorder.addEventListener('stop', any()),
- ).thenAnswer((Invocation invocation) {
- videoRecordingStoppedListener =
- invocation.positionalArguments[1] as void Function(Event);
- });
+ late EventListener videoDataAvailableListener;
+ late EventListener videoRecordingStoppedListener;
+
+ mockMediaRecorder.addEventListener = (
+ String type,
+ EventListener? callback, [
+ JSAny? options,
+ ]) {
+ if (type == 'dataavailable') {
+ videoDataAvailableListener = callback!;
+ } else if (type == 'stop') {
+ videoRecordingStoppedListener = callback!;
+ }
+ }.toJS;
Blob? finalVideo;
List<Blob>? videoParts;
camera.blobBuilder = (List<Blob> blobs, String videoType) {
videoParts = <Blob>[...blobs];
- finalVideo = Blob(blobs, videoType);
+ finalVideo = Blob(blobs.toJS, BlobPropertyBag(type: videoType));
return finalVideo!;
};
await camera.startVideoRecording();
+
+ int stops = 0;
+ mockMediaRecorder.stop = () {
+ stops++;
+ }.toJS;
+
final Future<XFile> videoFileFuture = camera.stopVideoRecording();
- final Blob capturedVideoPartOne = Blob(