updates #43

Merged
merged 5 commits on Nov 13, 2023
Binary file modified resources/common/ffmpeg_av_jni.dll
Binary file not shown.
Binary file modified resources/common/libffmpeg_av_jni.jnilib
Binary file not shown.
Binary file modified resources/common/libffmpeg_av_jni.so
Binary file not shown.
84 changes: 63 additions & 21 deletions src/main/java/com/zoffcc/applications/ffmpegav/AVActivity.java
@@ -5,7 +5,7 @@
public class AVActivity {

private static final String TAG = "ffmpegav.AVActivity";
static final String Version = "0.99.7";
static final String Version = "0.99.9";

public static native String ffmpegav_version();
public static native String ffmpegav_libavutil_version();
@@ -14,7 +14,7 @@ public class AVActivity {
public static native String[] ffmpegav_get_video_in_devices();
public static native String[] ffmpegav_get_audio_in_devices();
public static native String[] ffmpegav_get_in_sources(String devicename, int is_video);
public static native int ffmpegav_open_video_in_device(String deviceformat, String inputname, int wanted_width, int wanted_height, int fps);
public static native int ffmpegav_open_video_in_device(String deviceformat, String inputname, int wanted_width, int wanted_height, int fps, int force_mjpeg);
public static native int ffmpegav_open_audio_in_device(String deviceformat, String inputname);
public static native int ffmpegav_start_video_in_capture();
public static native int ffmpegav_start_audio_in_capture();
@@ -71,12 +71,14 @@ else if (value == AV_CODEC_ID_H264.value)

public static interface video_capture_callback {
void onSuccess(long width, long height, long source_width, long source_height, long pts, int fps, int source_format);
void onBufferTooSmall(int y_buffer_size, int u_buffer_size, int v_buffer_size);
void onError();
}
static video_capture_callback video_capture_callback_function = null;

public static interface audio_capture_callback {
void onSuccess(long read_bytes, int out_samples, int out_channels, int out_sample_rate, long pts);
void onBufferTooSmall(int audio_buffer_size);
void onError();
}
static audio_capture_callback audio_capture_callback_function = null;
@@ -103,16 +105,30 @@ public static void ffmpegav_callback_video_capture_frame_pts_cb_method(long width
}
}

public static void ffmpegav_callback_audio_capture_frame_pts_cb_method(long read_bytes, int out_samples, int out_channels, int out_sample_rate, long pts)
{
// Log.i(TAG, "capture audio frame bytes: " + read_bytes + " samples: " + out_samples + " channels: " + out_channels + " sample_rate: " + out_sample_rate);
if (audio_capture_callback_function != null) {
audio_capture_callback_function.onSuccess(read_bytes, out_samples, out_channels, out_sample_rate, pts);
}
}

public static void ffmpegav_set_audio_capture_callback(audio_capture_callback callback)
{
audio_capture_callback_function = callback;
}

public static void ffmpegav_callback_audio_capture_frame_pts_cb_method(long read_bytes, int out_samples, int out_channels, int out_sample_rate, long pts)
public static void ffmpegav_callback_video_capture_frame_too_small_cb_method(int y_buffer_size, int u_buffer_size, int v_buffer_size)
{
if (video_capture_callback_function != null) {
video_capture_callback_function.onBufferTooSmall(y_buffer_size, u_buffer_size, v_buffer_size);
}
}

public static void ffmpegav_callback_audio_capture_frame_too_small_cb_method(int audio_buffer_size)
{
// Log.i(TAG, "capture audio frame bytes: " + read_bytes + " samples: " + out_samples + " channels: " + out_channels + " sample_rate: " + out_sample_rate);
if (audio_capture_callback_function != null) {
audio_capture_callback_function.onSuccess(read_bytes, out_samples, out_channels, out_sample_rate, pts);
audio_capture_callback_function.onBufferTooSmall(audio_buffer_size);
}
}

@@ -268,23 +284,36 @@ public static void main(String[] args) {
}
}
}
for (int i=0;i<video_in_devices.length;i++)

final boolean TEST_DEV_VIDEO_0 = false;

if (TEST_DEV_VIDEO_0)
{
if (video_in_devices[i] != null)
vdevice = "video4linux2";
vsource = "/dev/video0";
final int res_vd = ffmpegav_open_video_in_device(vdevice,
vsource, 640, 480, 20, 1);
Log.i(TAG, "ffmpeg open video capture device: " + res_vd);
}
else
{
for (int i=0;i<video_in_devices.length;i++)
{
Log.i(TAG, "ffmpeg video in device #"+i+": " + video_in_devices[i]);
if (i == 1)
if (video_in_devices[i] != null)
{
vdevice = video_in_devices[i];
vsource = ":0";
final int res_vd = ffmpegav_open_video_in_device(vdevice,
vsource, 640, 480, 30);
Log.i(TAG, "ffmpeg open video capture device: " + res_vd);
Log.i(TAG, "ffmpeg video in device #"+i+": " + video_in_devices[i]);
if (i == 1)
{
vdevice = video_in_devices[i];
vsource = ":0";
final int res_vd = ffmpegav_open_video_in_device(vdevice,
vsource, 640, 480, 30, 0);
Log.i(TAG, "ffmpeg open video capture device: " + res_vd);
}
}
}
}


final String[] audio_in_devices = ffmpegav_get_audio_in_devices();
Log.i(TAG, "ffmpeg audio in devices: " + audio_in_devices.length);
for (int i=0;i<audio_in_devices.length;i++)
@@ -328,7 +357,7 @@ public static void main(String[] args) {

final int frame_width_px2 = 640;
final int frame_height_px2 = 480;
final int buffer_size_in_bytes2 = ((frame_width_px2 * frame_height_px2) * 3) / 2;
final int buffer_size_in_bytes2 = 10; // ((frame_width_px2 * frame_height_px2) * 3) / 2;
final java.nio.ByteBuffer video_buffer_2_y = java.nio.ByteBuffer.allocateDirect(buffer_size_in_bytes2);
final java.nio.ByteBuffer video_buffer_2_u = java.nio.ByteBuffer.allocateDirect(buffer_size_in_bytes2);
final java.nio.ByteBuffer video_buffer_2_v = java.nio.ByteBuffer.allocateDirect(buffer_size_in_bytes2);
@@ -341,12 +370,21 @@ public static void main(String[] args) {
@Override
public void onSuccess(long width, long height, long source_width, long source_height, long pts, int fps, int source_format) {
Log.i(TAG, "ffmpeg open video capture onSuccess:" + width + " " + height + " " +
source_width + " " + source_height + " " + pts + " fps: " + fps +
" source_format: " + ffmpegav_video_source_format_name.value_str(source_format));
source_width + " " + source_height + " " + pts + " fps: " + fps +
" source_format: " + ffmpegav_video_source_format_name.value_str(source_format));
}
@Override
public void onError() {
}
@Override
public void onBufferTooSmall(int y_buffer_size, int u_buffer_size, int v_buffer_size) {
Log.i(TAG, "Video buffer too small, needed sizes: " + y_buffer_size
+ " " + u_buffer_size + " "+ v_buffer_size);
final java.nio.ByteBuffer video_buffer_2_y = java.nio.ByteBuffer.allocateDirect(y_buffer_size);
final java.nio.ByteBuffer video_buffer_2_u = java.nio.ByteBuffer.allocateDirect(u_buffer_size);
final java.nio.ByteBuffer video_buffer_2_v = java.nio.ByteBuffer.allocateDirect(v_buffer_size);
ffmpegav_set_JNI_video_buffer2(video_buffer_2_y, video_buffer_2_u, video_buffer_2_v, frame_width_px2, frame_height_px2);
}
});

ffmpegav_set_audio_capture_callback(new audio_capture_callback() {
@@ -367,14 +405,18 @@ public void onSuccess(long read_bytes, int out_samples, int out_channels, int out_sample_rate, long pts)
@Override
public void onError() {
}
@Override
public void onBufferTooSmall(int audio_buffer_size) {
Log.i(TAG, "Audio buffer too small, needed size=" + audio_buffer_size);
}
});

ffmpegav_start_video_in_capture();
ffmpegav_apply_audio_filter(1);
ffmpegav_start_audio_in_capture();
try
{
Thread.sleep(10000);
Thread.sleep(1000);
}
catch(Exception e)
{
@@ -401,7 +443,7 @@ public void onError() {
// -----------------------
/*
final int res_vd2 = ffmpegav_open_video_in_device(vdevice,
vsource, 640, 480, 15);
vsource, 640, 480, 15, 0);
Log.i(TAG, "ffmpeg open video capture device: " + res_vd2);

final int res_ad2 = ffmpegav_open_audio_in_device(adevice,
@@ -428,7 +470,7 @@ public void onError() {
// -----------------------
// -----------------------
final int res_vd3 = ffmpegav_open_video_in_device("",
"", 640, 480, 30);
"", 640, 480, 30, 0);
Log.i(TAG, "ffmpeg open video capture device: " + res_vd3);

final int res_ad3 = ffmpegav_open_audio_in_device("",
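Note on the new callbacks: onBufferTooSmall reports the Y/U/V (or audio) buffer sizes the native capture layer actually needs, so a caller can allocate correctly sized direct ByteBuffers and re-register them instead of guessing up front. A minimal sketch of that pattern from the consumer side follows; it assumes a setter named ffmpegav_set_video_capture_callback (only the audio setter ffmpegav_set_audio_capture_callback appears verbatim in this diff) and that the native JNI library has already been loaded and initialized (ffmpegav_init in the real app).

import java.nio.ByteBuffer;
import com.zoffcc.applications.ffmpegav.AVActivity;

public class BufferResizeSketch {
    // Deliberately tiny direct buffers to start with; the native side asks for more
    // via onBufferTooSmall (the PR's own main() test does the same with buffer_size_in_bytes2 = 10).
    static ByteBuffer y = ByteBuffer.allocateDirect(16);
    static ByteBuffer u = ByteBuffer.allocateDirect(16);
    static ByteBuffer v = ByteBuffer.allocateDirect(16);

    public static void main(String[] args) {
        final int w = 640;
        final int h = 480;
        AVActivity.ffmpegav_set_JNI_video_buffer2(y, u, v, w, h);

        // setter name is assumed here; only the audio setter is visible in this diff
        AVActivity.ffmpegav_set_video_capture_callback(new AVActivity.video_capture_callback() {
            @Override
            public void onSuccess(long width, long height, long source_width, long source_height,
                                  long pts, int fps, int source_format) {
                // read the captured YUV frame out of y/u/v here
            }

            @Override
            public void onBufferTooSmall(int y_buffer_size, int u_buffer_size, int v_buffer_size) {
                // reallocate at the sizes the native layer reported and hand the new buffers back
                y = ByteBuffer.allocateDirect(y_buffer_size);
                u = ByteBuffer.allocateDirect(u_buffer_size);
                v = ByteBuffer.allocateDirect(v_buffer_size);
                AVActivity.ffmpegav_set_JNI_video_buffer2(y, u, v, w, h);
            }

            @Override
            public void onError() {
            }
        });

        // last parameter is the new force_mjpeg flag: 0 = auto, 1 = force MJPEG on v4l2 devices
        AVActivity.ffmpegav_open_video_in_device("video4linux2", "/dev/video0", w, h, 30, 0);
        AVActivity.ffmpegav_start_video_in_capture();
    }
}

The Kotlin side (AVState.kt below) applies the same reallocation pattern in its own onBufferTooSmall overrides.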
30 changes: 24 additions & 6 deletions src/main/kotlin/com/zoffcc/applications/trifa/AVState.kt
@@ -9,6 +9,7 @@ import com.zoffcc.applications.ffmpegav.AVActivity.ffmpegav_apply_audio_filter
import com.zoffcc.applications.ffmpegav.AVActivity.ffmpegav_init
import com.zoffcc.applications.trifa.MainActivity.Companion.PREF__audio_input_filter
import com.zoffcc.applications.trifa.MainActivity.Companion.PREF__audio_play_volume_percent
import com.zoffcc.applications.trifa.MainActivity.Companion.PREF__v4l2_capture_force_mjpeg
import com.zoffcc.applications.trifa.MainActivity.Companion.set_audio_play_volume_percent
import global_prefs
import kotlinx.coroutines.CoroutineScope
@@ -84,7 +85,8 @@ data class AVState(val a: Int)
{
println("ffmpeg video in device: " + video_in_device + " " + video_in_source)
val res_vd = AVActivity.ffmpegav_open_video_in_device(video_in_device, video_in_source,
video_in_resolution_width, video_in_resolution_height, CAPTURE_VIDEO_FPS)
video_in_resolution_width, video_in_resolution_height, CAPTURE_VIDEO_FPS,
PREF__v4l2_capture_force_mjpeg)
println("ffmpeg open video capture device: $res_vd")
}
}
@@ -423,7 +425,8 @@ data class AVState(val a: Int)
{
println("ffmpeg video in device: " + video_in_device + " " + video_in_source)
val res_vd = AVActivity.ffmpegav_open_video_in_device(video_in_device, video_in_source,
video_in_resolution_width_pin, video_in_resolution_height_pin, CAPTURE_VIDEO_FPS)
video_in_resolution_width_pin, video_in_resolution_height_pin, CAPTURE_VIDEO_FPS,
PREF__v4l2_capture_force_mjpeg)
println("ffmpeg open video capture device: $res_vd")
}
}
@@ -437,9 +440,9 @@ data class AVState(val a: Int)
val y_size = frame_width_px2 * frame_height_px2
val u_size = (frame_width_px2 * frame_height_px2 / 4)
val v_size = (frame_width_px2 * frame_height_px2 / 4)
val video_buffer_2_y = ByteBuffer.allocateDirect(y_size)
val video_buffer_2_u = ByteBuffer.allocateDirect(u_size)
val video_buffer_2_v = ByteBuffer.allocateDirect(v_size)
var video_buffer_2_y = ByteBuffer.allocateDirect(y_size)
var video_buffer_2_u = ByteBuffer.allocateDirect(u_size)
var video_buffer_2_v = ByteBuffer.allocateDirect(v_size)
AVActivity.ffmpegav_set_JNI_video_buffer2(video_buffer_2_y, video_buffer_2_u, video_buffer_2_v, frame_width_px2, frame_height_px2)
val audio_in_device = audio_in_device_get()
val audio_in_source = audio_in_source_get()
@@ -455,7 +458,7 @@ data class AVState(val a: Int)
}
}
val buffer_size_in_bytes2 = 50000 // TODO: don't hardcode this
val audio_buffer_1 = ByteBuffer.allocateDirect(buffer_size_in_bytes2)
var audio_buffer_1 = ByteBuffer.allocateDirect(buffer_size_in_bytes2)
AVActivity.ffmpegav_set_JNI_audio_buffer2(audio_buffer_1)

AVActivity.ffmpegav_set_audio_capture_callback(object : AVActivity.audio_capture_callback
@@ -518,6 +521,12 @@
DEBUG ONLY ---------------------------- */
}

override fun onBufferTooSmall(audio_buffer_size: Int)
{
audio_buffer_1 = ByteBuffer.allocateDirect(audio_buffer_size)
AVActivity.ffmpegav_set_JNI_audio_buffer2(audio_buffer_1)
}

override fun onError()
{
}
@@ -568,6 +577,15 @@
VideoOutFrame.new_video_out_frame(video_buffer_2, frame_width_px, frame_height_px)
}

override fun onBufferTooSmall(y_buffer_size: Int, u_buffer_size: Int, v_buffer_size: Int)
{
Log.i(TAG, "ffmpeg open video capture onBufferTooSmall: sizes needed: " + y_buffer_size + " " + u_buffer_size + " " + v_buffer_size)
video_buffer_2_y = ByteBuffer.allocateDirect(y_buffer_size)
video_buffer_2_u = ByteBuffer.allocateDirect(u_buffer_size)
video_buffer_2_v = ByteBuffer.allocateDirect(v_buffer_size)
AVActivity.ffmpegav_set_JNI_video_buffer2(video_buffer_2_y, video_buffer_2_u, video_buffer_2_v, frame_width_px2, frame_height_px2)
}

override fun onError()
{
}
@@ -112,6 +112,7 @@ class MainActivity
var PREF__udp_enabled = 1
var PREF__audio_play_volume_percent = 100
var PREF__audio_input_filter = 0
var PREF__v4l2_capture_force_mjpeg: Int = 0 // 0 -> auto, 1 -> force MJPEG video capture with v4l2 devices
var PREF__orbot_enabled_to_int = 0
var PREF__local_discovery_enabled = 1
var PREF__ipv6_enabled = 1
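The new PREF__v4l2_capture_force_mjpeg value is passed straight through as the last argument of ffmpegav_open_video_in_device() (see the AVState.kt hunks above). A hedged sketch of the flag's two modes, using the same placeholder device and source names as the PR's own test code:

import com.zoffcc.applications.ffmpegav.AVActivity;

public class ForceMjpegSketch {
    public static void main(String[] args) {
        // 0 -> auto (let ffmpeg negotiate the capture format), 1 -> force MJPEG on v4l2 devices,
        // matching the comment on PREF__v4l2_capture_force_mjpeg above.
        final int force_mjpeg = 1;

        // "video4linux2" / "/dev/video0" are placeholders taken from the PR's test path;
        // the real app reads device and source from its settings.
        final int res = AVActivity.ffmpegav_open_video_in_device(
                "video4linux2", "/dev/video0", 640, 480, 30, force_mjpeg);
        System.out.println("ffmpegav_open_video_in_device -> " + res);
    }
}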
@@ -24,7 +24,7 @@ import java.awt.Frame

object FilePicker {

val TAG = "trifa.ImagePicker"
val TAG = "trifa.FilePicker"

private val SUPPORTED_EXTENSIONS_IMAGE = setOf("png", "jpg", "jpeg", "gif", "webp")
