google_mlkit_face_detection not working with frames extracted from a video_player file #736

Open
M-Farjad opened this issue Jan 24, 2025 · 0 comments


@fbernaly I am trying this code using the video_player package as the frame source; the video is picked with image_picker. Can you please help with this?

I am getting the exact same error:
PlatformException(InputImageConverterError, java.lang.IllegalArgumentException, null, null)

It occurs after extracting a frame with the ffmpeg_kit_flutter package using the code below. Can anybody suggest a fix for handling the video in this code?

import 'dart:developer';
import 'dart:typed_data';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import 'package:video_player/video_player.dart';
import 'package:ffmpeg_kit_flutter/ffmpeg_kit.dart'; // Import ffmpeg_kit_flutter
import 'package:path_provider/path_provider.dart'; // To get temporary directory

class VideoUtil {
  // Static method to extract the current frame from the video as an InputImage
  static Future<InputImage?> extractFrameAndReturnInputImage(
      VideoPlayerController controller) async {
    if (!controller.value.isInitialized) {
      log("Video not initialized");
      return null;
    }
    try {
      // Path to save the extracted frame
      final Directory tempDir = await getTemporaryDirectory();
      final String tempFilePath = '${tempDir.path}/frame.jpg';
      // final File frameFile = File(tempFilePath);
      // if (frameFile.existsSync()) {
      //   await frameFile.delete();
      // }
      // Ensure the path does not include the 'file://' prefix if it is a local file
      final String sanitizedPath =
          controller.dataSource.replaceFirst('file://', '');
      // Quote the path so spaces do not break the FFmpeg command
      final String quotedPath = '"$sanitizedPath"';
      // Note: with -f rawvideo the output file contains raw planar yuv420p data,
      // not a JPEG, despite the .jpg extension.
      final String command =
          '-y -ss ${controller.value.position.inSeconds.toStringAsFixed(2)} -i $quotedPath -vf format=yuv420p -frames:v 1 -f rawvideo $tempFilePath';
      // final String command =
      //     '-y -v debug -ss ${controller.value.position.inSeconds.toStringAsFixed(2)} -i $quotedPath -vf format=yuv420p -frames:v 1 $tempFilePath';
      final session = await FFmpegKit.execute(command);
      final returnCode = await session.getReturnCode();
      final output = await session.getOutput();
      log("FFmpeg return code: $returnCode");
      log("FFmpeg output: $output");
      if (returnCode!.isValueSuccess()) {
        log("Frame return code valid");
        // Read the extracted frame as raw bytes
        final File frameFile = File(tempFilePath);
        final Uint8List frameBytes = await frameFile.readAsBytes();
        // Validate and log video metadata
        final int width = controller.value.size.width.toInt();
        final int height = controller.value.size.height.toInt();
        final int rotation = _calculateRotation(controller);
        // Estimate bytes per row (assuming 3 bytes per pixel, i.e. RGB)
        final int bytesPerRow = width * 3;
        log("Estimated bytes per row: $bytesPerRow");
        final InputImageRotation? inputRotation =
            InputImageRotationValue.fromRawValue(rotation);
        if (inputRotation == null) {
          log("Invalid rotation value: $rotation");
          return null;
        }
        // Dynamically determine the format
        final InputImageFormat format =
            _detectImageFormat(frameBytes, width, height);
        log("Detected image format: ${format.name}");
        Uint8List bytes;
        if ((format == InputImageFormat.yuv_420_888 ||
                format == InputImageFormat.yuv420) &&
            Platform.isAndroid) {
          log("Converting YUV_420_888 to NV21...");
          // Validate frame size
          final int expectedSize =
              width * height + 2 * ((width ~/ 2) * (height ~/ 2));
          if (frameBytes.length != expectedSize) {
            throw Exception(
                "Frame file size mismatch. Expected: $expectedSize, Actual: ${frameBytes.length}");
          }
          bytes = convertYUV420ToNV21(frameFile, width, height);
        } else {
          log("Using raw plane bytes.");
          bytes = frameBytes;
        }
        // Return an InputImage
        final InputImage inputImage = InputImage.fromBytes(
          bytes: bytes,
          metadata: InputImageMetadata(
            size: Size(width.toDouble(), height.toDouble()),
            rotation: inputRotation,
            format: format,
            bytesPerRow: bytesPerRow,
          ),
        );
        log("InputImage: ${inputImage.metadata?.toJson()}");
        return inputImage;
      } else {
        log("FFmpeg error: $returnCode");
        return null; // Return null if FFmpeg failed
      }
    } catch (e) {
      log("Error extracting frame bytes: $e");
      return null; // Return null if an error occurs
    }
  }
  static int _calculateRotation(VideoPlayerController controller) {
    try {
      // Calculate rotation using the controller's metadata
      final int rotationDegrees = controller.value.rotationCorrection;
      log("Video rotation correction: $rotationDegrees degrees");
      // Convert to InputImageRotation enum values
      switch (rotationDegrees) {
        case 0:
          return InputImageRotation.rotation0deg.rawValue;
        case 90:
          return InputImageRotation.rotation90deg.rawValue;
        case 180:
          return InputImageRotation.rotation180deg.rawValue;
        case 270:
          return InputImageRotation.rotation270deg.rawValue;
        default:
          log("Unexpected rotation value: $rotationDegrees. Defaulting to 0.");
          return InputImageRotation.rotation0deg.rawValue;
      }
    } catch (e) {
      log("Error calculating video rotation: $e");
      return InputImageRotation.rotation0deg.rawValue; // Default to 0 degrees
    }
  }
  static Uint8List convertYUV420ToNV21(File frameFile, int width, int height) {
    try {
      // Read raw bytes from the frame file
      final Uint8List frameBytes = frameFile.readAsBytesSync();
      // Calculate Y and U/V plane sizes
      final int ySize = width * height;
      final int uvSize = (width ~/ 2) * (height ~/ 2);
      log("Width: $width, Height: $height, Y Size: $ySize, UV Size: $uvSize");
      log("Frame file size: ${frameBytes.length} bytes");
      // Validate frame size
      if (frameBytes.length < ySize + 2 * uvSize) {
        throw Exception(
            "Frame file size is smaller than expected for YUV420 format. Expected: ${ySize + 2 * uvSize}, Actual: ${frameBytes.length}");
      }
      // Initialize the NV21 buffer
      final Uint8List nv21 = Uint8List(ySize + 2 * uvSize);
      // Copy the Y plane (rows are assumed to be tightly packed, so no stride adjustment)
      int yIndex = 0;
      for (int row = 0; row < height; row++) {
        nv21.setRange(
          yIndex,
          yIndex + width,
          frameBytes.sublist(row * width, row * width + width),
        );
        yIndex += width;
      }
      log("Copied Y plane.");
      // Interleave the U and V planes into NV21 order (V first, then U)
      int uvIndex = ySize; // Start writing after the Y plane
      final int uvPlaneOffset = ySize; // Offset where the U plane begins (I420 layout)
      final int uvRowStride = width ~/ 2; // U/V row width (half the luma width for 4:2:0)
      for (int row = 0; row < height ~/ 2; row++) {
        for (int col = 0; col < width ~/ 2; col++) {
          final int uvOffset = uvPlaneOffset + (row * uvRowStride) + col;
          nv21[uvIndex++] = frameBytes[uvOffset + uvSize]; // V sample first
          nv21[uvIndex++] = frameBytes[uvOffset]; // U sample second
        }
      }
      log("Interleaved U/V planes into NV21 order.");
      return nv21;
    } catch (e) {
      log("Error converting YUV420 to NV21: $e");
      rethrow;
    }
  }
  static InputImageFormat _detectImageFormat(
      Uint8List frameBytes, int width, int height) {
    if (frameBytes.isEmpty) {
      throw Exception("Empty frame bytes; cannot detect format.");
    }
    // Calculate expected sizes for common formats
    final int ySize = width * height; // Y plane size
    final int uvSize =
        (width ~/ 2) * (height ~/ 2); // U/V plane size for 4:2:0 subsampling
    if (frameBytes.length == ySize + 2 * uvSize) {
      log("Detected YUV_420_888 format based on size.");
      return InputImageFormat.yuv_420_888;
    } else if (frameBytes.length == ySize + uvSize * 2) {
      // Note: this condition is identical to the one above, so this branch is
      // unreachable; NV21 and planar YUV420 have the same total byte length
      // and cannot be told apart by size alone.
      log("Detected NV21 format based on size.");
      return InputImageFormat.nv21;
    } else if (frameBytes.length == ySize * 4) {
      // 4 bytes per pixel
      log("Detected BGRA8888 format based on size.");
      return InputImageFormat.bgra8888;
    } else if (Platform.isAndroid) {
      log("Size did not match a known format; falling back to YUV_420_888 on Android.");
      return InputImageFormat.yuv_420_888;
    } else {
      // Fall back to BGRA8888 elsewhere (e.g. iOS).
      return InputImageFormat.bgra8888;
    }
  }
}
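
For reference, here is a much simpler workaround I am considering (an untested sketch using the same imports as above, assuming the frame only needs to reach ML Kit and does not have to stay in memory): let FFmpeg encode the frame as a real JPEG and hand the file to ML Kit with InputImage.fromFilePath, so none of the format/rotation/bytesPerRow metadata has to be filled in by hand. The extractFrameAsJpeg name is just for illustration.

Future<InputImage?> extractFrameAsJpeg(VideoPlayerController controller) async {
  // Write the frame into the app's temporary directory.
  final Directory tempDir = await getTemporaryDirectory();
  final String outPath = '${tempDir.path}/frame.jpg';
  final String inputPath = controller.dataSource.replaceFirst('file://', '');
  final double seconds = controller.value.position.inMilliseconds / 1000.0;

  // Encode exactly one frame as a real JPEG (no -f rawvideo), so the output
  // file actually contains JPEG data that ML Kit can decode on its own.
  final session = await FFmpegKit.execute(
      '-y -ss $seconds -i "$inputPath" -frames:v 1 -q:v 2 $outPath');
  final returnCode = await session.getReturnCode();
  if (returnCode == null || !returnCode.isValueSuccess()) {
    return null;
  }

  // ML Kit reads and decodes the file itself; no InputImageMetadata is needed.
  return InputImage.fromFilePath(outPath);
}

If the raw-bytes path is required, it might also help to have FFmpeg emit NV21 directly (e.g. -vf format=nv21 -f rawvideo) and then pass format: InputImageFormat.nv21 with bytesPerRow: width to InputImage.fromBytes; as the code stands, NV21 bytes end up labelled as yuv_420_888 with a bytesPerRow computed for 3-byte RGB pixels, which looks like a plausible cause of the InputImageConverterError.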

Originally posted by @M-Farjad in #518
