diff --git a/build.gradle b/build.gradle index 72026fdd6..94a2bb840 100644 --- a/build.gradle +++ b/build.gradle @@ -7,7 +7,7 @@ buildscript { jcenter() } dependencies { - classpath 'com.android.tools.build:gradle:4.2.0-beta04' + classpath 'com.android.tools.build:gradle:4.1.2' classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" // TODO: Close JCenter on May 1st https://jfrog.com/blog/into-the-sunset-bintray-jcenter-gocenter-and-chartcenter/ diff --git a/gradle.properties b/gradle.properties index bd6a24475..9c94088a8 100644 --- a/gradle.properties +++ b/gradle.properties @@ -19,8 +19,8 @@ VERSION_NAME=2.1.0 VERSION_CODE=14 COMPILE_SDK_VERSION=30 -TARGET_SDK_VERSION=30 -MIN_SDK_VERSION=14 +TARGET_SDK_VERSION=29 +MIN_SDK_VERSION=18 android.useAndroidX=true android.enableJetifier=true diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index a7a55a149..cb05be028 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https://services.gradle.org/distributions/gradle-6.7.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists \ No newline at end of file diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/GPUImageMovieWriter.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/GPUImageMovieWriter.java new file mode 100644 index 000000000..6b57e1791 --- /dev/null +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/GPUImageMovieWriter.java @@ -0,0 +1,146 @@ +package jp.co.cyberagent.android.gpuimage; + +import android.annotation.TargetApi; +import android.opengl.EGL14; + +import java.io.IOException; +import java.nio.FloatBuffer; + +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLContext; +import 
javax.microedition.khronos.egl.EGLDisplay; +import javax.microedition.khronos.egl.EGLSurface; + +import jp.co.cyberagent.android.gpuimage.encoder.EglCore; +import jp.co.cyberagent.android.gpuimage.encoder.MediaAudioEncoder; +import jp.co.cyberagent.android.gpuimage.encoder.MediaEncoder; +import jp.co.cyberagent.android.gpuimage.encoder.MediaMuxerWrapper; +import jp.co.cyberagent.android.gpuimage.encoder.MediaVideoEncoder; +import jp.co.cyberagent.android.gpuimage.encoder.WindowSurface; +import jp.co.cyberagent.android.gpuimage.filter.GPUImageFilter; + +@TargetApi(18) +public class GPUImageMovieWriter extends GPUImageFilter { + private MediaMuxerWrapper mMuxer; + private MediaVideoEncoder mVideoEncoder; + private MediaAudioEncoder mAudioEncoder; + private WindowSurface mCodecInput; + + private EGLSurface mEGLScreenSurface; + private EGL10 mEGL; + private EGLDisplay mEGLDisplay; + private EGLContext mEGLContext; + private EglCore mEGLCore; + + private boolean mIsRecording = false; + + @Override + public void onInit() { + super.onInit(); + mEGL = (EGL10) EGLContext.getEGL(); + mEGLDisplay = mEGL.eglGetCurrentDisplay(); + mEGLContext = mEGL.eglGetCurrentContext(); + mEGLScreenSurface = mEGL.eglGetCurrentSurface(EGL10.EGL_DRAW); + } + + @Override + public void onDraw(int textureId, FloatBuffer cubeBuffer, FloatBuffer textureBuffer) { + // Draw on screen surface + super.onDraw(textureId, cubeBuffer, textureBuffer); + + if (mIsRecording) { + // create encoder surface + if (mCodecInput == null) { + mEGLCore = new EglCore(EGL14.eglGetCurrentContext(), EglCore.FLAG_RECORDABLE); + mCodecInput = new WindowSurface(mEGLCore, mVideoEncoder.getSurface(), false); + } + + // Draw on encoder surface + mCodecInput.makeCurrent(); + super.onDraw(textureId, cubeBuffer, textureBuffer); + mCodecInput.swapBuffers(); + mVideoEncoder.frameAvailableSoon(); + } + + // Make screen surface be current surface + mEGL.eglMakeCurrent(mEGLDisplay, mEGLScreenSurface, mEGLScreenSurface, mEGLContext); + 
} + + @Override + public void onDestroy() { + super.onDestroy(); + releaseEncodeSurface(); + } + + public void startRecording(final String outputPath, final int width, final int height) { + runOnDraw(new Runnable() { + @Override + public void run() { + if (mIsRecording) { + return; + } + + try { + mMuxer = new MediaMuxerWrapper(outputPath); + + // for video capturing + mVideoEncoder = new MediaVideoEncoder(mMuxer, mMediaEncoderListener, width, height); + // for audio capturing + mAudioEncoder = new MediaAudioEncoder(mMuxer, mMediaEncoderListener); + + mMuxer.prepare(); + mMuxer.startRecording(); + + mIsRecording = true; + } catch (IOException e) { + e.printStackTrace(); + } + } + }); + } + + public void stopRecording() { + runOnDraw(new Runnable() { + @Override + public void run() { + if (!mIsRecording) { + return; + } + + mMuxer.stopRecording(); + mIsRecording = false; + releaseEncodeSurface(); + } + }); + } + + private void releaseEncodeSurface() { + if (mEGLCore != null) { + mEGLCore.makeNothingCurrent(); + mEGLCore.release(); + mEGLCore = null; + } + + if (mCodecInput != null) { + mCodecInput.release(); + mCodecInput = null; + } + } + + /** + * callback methods from encoder + */ + private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() { + @Override + public void onPrepared(final MediaEncoder encoder) { + } + + @Override + public void onStopped(final MediaEncoder encoder) { + } + + @Override + public void onMuxerStopped() { + } + }; +} diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/GPUImageRenderer.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/GPUImageRenderer.java index 5fea701bb..fa060eca7 100644 --- a/library/src/main/java/jp/co/cyberagent/android/gpuimage/GPUImageRenderer.java +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/GPUImageRenderer.java @@ -301,6 +301,11 @@ private void adjustImageScaling() { addDistance(textureCords[6], distHorizontal), 
addDistance(textureCords[7], distVertical), }; } else { + if (rotation == Rotation.ROTATION_270 || rotation == Rotation.ROTATION_90) { + ratioWidth = ratioWidth + ratioHeight; + ratioHeight = ratioWidth - ratioHeight; + ratioWidth = ratioWidth - ratioHeight; + } cube = new float[]{ CUBE[0] / ratioHeight, CUBE[1] / ratioWidth, CUBE[2] / ratioHeight, CUBE[3] / ratioWidth, diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/EglCore.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/EglCore.java new file mode 100644 index 000000000..375f4d17c --- /dev/null +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/EglCore.java @@ -0,0 +1,375 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package jp.co.cyberagent.android.gpuimage.encoder; + +import android.annotation.TargetApi; +import android.graphics.SurfaceTexture; +import android.opengl.EGL14; +import android.opengl.EGLConfig; +import android.opengl.EGLContext; +import android.opengl.EGLDisplay; +import android.opengl.EGLExt; +import android.opengl.EGLSurface; +import android.util.Log; +import android.view.Surface; + +/** + * Core EGL state (display, context, config). + *

+ * The EGLContext must only be attached to one thread at a time. This class is not thread-safe. + */ +@TargetApi(18) +public final class EglCore { + private static final String TAG = "EglCore"; + + /** + * Constructor flag: surface must be recordable. This discourages EGL from using a + * pixel format that cannot be converted efficiently to something usable by the video + * encoder. + */ + public static final int FLAG_RECORDABLE = 0x01; + + /** + * Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this + * flag, GLES2 is used. + */ + public static final int FLAG_TRY_GLES3 = 0x02; + + // Android-specific extension. + private static final int EGL_RECORDABLE_ANDROID = 0x3142; + + private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY; + private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT; + private EGLConfig mEGLConfig = null; + private int mGlVersion = -1; + + + /** + * Prepares EGL display and context. + *

+ * Equivalent to EglCore(null, 0). + */ + public EglCore() { + this(null, 0); + } + + /** + * Prepares EGL display and context. + *

+ * @param sharedContext The context to share, or null if sharing is not desired. + * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE. + */ + public EglCore(EGLContext sharedContext, int flags) { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + throw new RuntimeException("EGL already set up"); + } + + if (sharedContext == null) { + sharedContext = EGL14.EGL_NO_CONTEXT; + } + + mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { + throw new RuntimeException("unable to get EGL14 display"); + } + int[] version = new int[2]; + if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { + mEGLDisplay = null; + throw new RuntimeException("unable to initialize EGL14"); + } + + // Try to get a GLES3 context, if requested. + if ((flags & FLAG_TRY_GLES3) != 0) { + //Log.d(TAG, "Trying GLES 3"); + EGLConfig config = getConfig(flags, 3); + if (config != null) { + int[] attrib3_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 3, + EGL14.EGL_NONE + }; + EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext, + attrib3_list, 0); + + if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) { + //Log.d(TAG, "Got GLES 3 config"); + mEGLConfig = config; + mEGLContext = context; + mGlVersion = 3; + } + } + } + if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed + //Log.d(TAG, "Trying GLES 2"); + EGLConfig config = getConfig(flags, 2); + if (config == null) { + throw new RuntimeException("Unable to find a suitable EGLConfig"); + } + int[] attrib2_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, + EGL14.EGL_NONE + }; + EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext, + attrib2_list, 0); + checkEglError("eglCreateContext"); + mEGLConfig = config; + mEGLContext = context; + mGlVersion = 2; + } + + // Confirm with query. 
+ int[] values = new int[1]; + EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, + values, 0); + Log.d(TAG, "EGLContext created, client version " + values[0]); + } + + /** + * Finds a suitable EGLConfig. + * + * @param flags Bit flags from constructor. + * @param version Must be 2 or 3. + */ + private EGLConfig getConfig(int flags, int version) { + int renderableType = EGL14.EGL_OPENGL_ES2_BIT; + if (version >= 3) { + renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR; + } + + // The actual surface is generally RGBA or RGBX, so situationally omitting alpha + // doesn't really help. It can also lead to a huge performance hit on glReadPixels() + // when reading into a GL_RGBA buffer. + int[] attribList = { + EGL14.EGL_RED_SIZE, 8, + EGL14.EGL_GREEN_SIZE, 8, + EGL14.EGL_BLUE_SIZE, 8, + EGL14.EGL_ALPHA_SIZE, 8, + //EGL14.EGL_DEPTH_SIZE, 16, + //EGL14.EGL_STENCIL_SIZE, 8, + EGL14.EGL_RENDERABLE_TYPE, renderableType, + EGL14.EGL_NONE, 0, // placeholder for recordable [@-3] + EGL14.EGL_NONE + }; + if ((flags & FLAG_RECORDABLE) != 0) { + attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID; + attribList[attribList.length - 2] = 1; + } + EGLConfig[] configs = new EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, + numConfigs, 0)) { + Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig"); + return null; + } + return configs[0]; + } + + /** + * Discards all resources held by this class, notably the EGL context. This must be + * called from the thread where the context was created. + *

+ * On completion, no context will be current. + */ + public void release() { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + // Android is unusual in that it uses a reference-counted EGLDisplay. So for + // every eglInitialize() we need an eglTerminate(). + EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, + EGL14.EGL_NO_CONTEXT); + EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); + EGL14.eglReleaseThread(); + EGL14.eglTerminate(mEGLDisplay); + } + + mEGLDisplay = EGL14.EGL_NO_DISPLAY; + mEGLContext = EGL14.EGL_NO_CONTEXT; + mEGLConfig = null; + } + + @Override + protected void finalize() throws Throwable { + try { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + // We're limited here -- finalizers don't run on the thread that holds + // the EGL state, so if a surface or context is still current on another + // thread we can't fully release it here. Exceptions thrown from here + // are quietly discarded. Complain in the log file. + Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked"); + release(); + } + } finally { + super.finalize(); + } + } + + /** + * Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's + * still current in a context. + */ + public void releaseSurface(EGLSurface eglSurface) { + EGL14.eglDestroySurface(mEGLDisplay, eglSurface); + } + + /** + * Creates an EGL surface associated with a Surface. + *

+ * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute. + */ + public EGLSurface createWindowSurface(Object surface) { + if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) { + throw new RuntimeException("invalid surface: " + surface); + } + + // Create a window surface, and attach it to the Surface we received. + int[] surfaceAttribs = { + EGL14.EGL_NONE + }; + EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, + surfaceAttribs, 0); + checkEglError("eglCreateWindowSurface"); + if (eglSurface == null) { + throw new RuntimeException("surface was null"); + } + return eglSurface; + } + + /** + * Creates an EGL surface associated with an offscreen buffer. + */ + public EGLSurface createOffscreenSurface(int width, int height) { + int[] surfaceAttribs = { + EGL14.EGL_WIDTH, width, + EGL14.EGL_HEIGHT, height, + EGL14.EGL_NONE + }; + EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, + surfaceAttribs, 0); + checkEglError("eglCreatePbufferSurface"); + if (eglSurface == null) { + throw new RuntimeException("surface was null"); + } + return eglSurface; + } + + /** + * Makes our EGL context current, using the supplied surface for both "draw" and "read". + */ + public void makeCurrent(EGLSurface eglSurface) { + if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { + // called makeCurrent() before create? + Log.d(TAG, "NOTE: makeCurrent w/o display"); + } + if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) { + int error = EGL14.eglGetError(); + throw new RuntimeException("eglMakeCurrent failed with error = " + error); + } + } + + /** + * Makes our EGL context current, using the supplied "draw" and "read" surfaces. + */ + public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) { + if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { + // called makeCurrent() before create? 
+ Log.d(TAG, "NOTE: makeCurrent w/o display"); + } + if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) { + throw new RuntimeException("eglMakeCurrent(draw,read) failed"); + } + } + + /** + * Makes no context current. + */ + public void makeNothingCurrent() { + if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, + EGL14.EGL_NO_CONTEXT)) { + throw new RuntimeException("eglMakeCurrent failed"); + } + } + + /** + * Calls eglSwapBuffers. Use this to "publish" the current frame. + * + * @return false on failure + */ + public boolean swapBuffers(EGLSurface eglSurface) { + return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface); + } + + /** + * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds. + */ + public void setPresentationTime(EGLSurface eglSurface, long nsecs) { + EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs); + } + + /** + * Returns true if our context and the specified surface are current. + */ + public boolean isCurrent(EGLSurface eglSurface) { + return mEGLContext.equals(EGL14.eglGetCurrentContext()) && + eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW)); + } + + /** + * Performs a simple surface query. + */ + public int querySurface(EGLSurface eglSurface, int what) { + int[] value = new int[1]; + EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0); + return value[0]; + } + + /** + * Queries a string value. + */ + public String queryString(int what) { + return EGL14.eglQueryString(mEGLDisplay, what); + } + + /** + * Returns the GLES version this context is configured for (currently 2 or 3). + */ + public int getGlVersion() { + return mGlVersion; + } + + /** + * Writes the current display, context, and surface to the log. 
+ */ + public static void logCurrent(String msg) { + EGLDisplay display; + EGLContext context; + EGLSurface surface; + + display = EGL14.eglGetCurrentDisplay(); + context = EGL14.eglGetCurrentContext(); + surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW); + Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context + + ", surface=" + surface); + } + + /** + * Checks for EGL errors. Throws an exception if an error has been raised. + */ + private void checkEglError(String msg) { + int error; + if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) { + throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); + } + } +} diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/EglSurfaceBase.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/EglSurfaceBase.java new file mode 100644 index 000000000..52fac8e8c --- /dev/null +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/EglSurfaceBase.java @@ -0,0 +1,238 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package jp.co.cyberagent.android.gpuimage.encoder; + +import android.annotation.TargetApi; +import android.graphics.Bitmap; +import android.graphics.Matrix; +import android.opengl.EGL14; +import android.opengl.EGLSurface; +import android.opengl.GLES20; +import android.util.Log; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +/** + * Common base class for EGL surfaces. + *

+ * There can be multiple surfaces associated with a single context. + */ +@TargetApi(18) +public class EglSurfaceBase { + protected static final String TAG = "EglSurfaceBase"; + + // EglCore object we're associated with. It may be associated with multiple surfaces. + protected EglCore mEglCore; + + private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE; + private int mWidth = -1; + private int mHeight = -1; + + protected EglSurfaceBase(EglCore eglCore) { + mEglCore = eglCore; + } + + /** + * Creates a window surface. + *

+ * @param surface May be a Surface or SurfaceTexture. + */ + public void createWindowSurface(Object surface) { + if (mEGLSurface != EGL14.EGL_NO_SURFACE) { + throw new IllegalStateException("surface already created"); + } + mEGLSurface = mEglCore.createWindowSurface(surface); + + // Don't cache width/height here, because the size of the underlying surface can change + // out from under us (see e.g. HardwareScalerActivity). + //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); + //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); + } + + /** + * Creates an off-screen surface. + */ + public void createOffscreenSurface(int width, int height) { + if (mEGLSurface != EGL14.EGL_NO_SURFACE) { + throw new IllegalStateException("surface already created"); + } + mEGLSurface = mEglCore.createOffscreenSurface(width, height); + mWidth = width; + mHeight = height; + } + + /** + * Returns the surface's width, in pixels. + *

+ * If this is called on a window surface, and the underlying surface is in the process + * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged" + * callback). The size should match after the next buffer swap. + */ + public int getWidth() { + if (mWidth < 0) { + return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); + } else { + return mWidth; + } + } + + /** + * Returns the surface's height, in pixels. + */ + public int getHeight() { + if (mHeight < 0) { + return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); + } else { + return mHeight; + } + } + + /** + * Release the EGL surface. + */ + public void releaseEglSurface() { + mEglCore.releaseSurface(mEGLSurface); + mEGLSurface = EGL14.EGL_NO_SURFACE; + mWidth = mHeight = -1; + } + + /** + * Makes our EGL context and surface current. + */ + public void makeCurrent() { + mEglCore.makeCurrent(mEGLSurface); + } + + /** + * Makes our EGL context and surface current for drawing, using the supplied surface + * for reading. + */ + public void makeCurrentReadFrom(EglSurfaceBase readSurface) { + mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface); + } + + /** + * Calls eglSwapBuffers. Use this to "publish" the current frame. + * + * @return false on failure + */ + public boolean swapBuffers() { + boolean result = mEglCore.swapBuffers(mEGLSurface); + if (!result) { + Log.d(TAG, "WARNING: swapBuffers() failed"); + } + return result; + } + + /** + * Sends the presentation time stamp to EGL. + * + * @param nsecs Timestamp, in nanoseconds. + */ + public void setPresentationTime(long nsecs) { + mEglCore.setPresentationTime(mEGLSurface, nsecs); + } + + /** + * Saves the EGL surface to a file. + *

+ * Expects that this object's EGL surface is current. + */ + public void saveFrame(File file) throws IOException { + if (!mEglCore.isCurrent(mEGLSurface)) { + throw new RuntimeException("Expected EGL context/surface is not current"); + } + + // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA + // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap + // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the + // Bitmap "copy pixels" method wants the same format GL provides. + // + // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling + // here often. + // + // Making this even more interesting is the upside-down nature of GL, which means + // our output will look upside down relative to what appears on screen if the + // typical GL conventions are used. + + String filename = file.toString(); + + int width = getWidth(); + int height = getHeight(); + ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4); + buf.order(ByteOrder.LITTLE_ENDIAN); + GLES20.glReadPixels(0, 0, width, height, + GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); + //GlUtil.checkGlError("glReadPixels"); + buf.rewind(); + + BufferedOutputStream bos = null; + try { + bos = new BufferedOutputStream(new FileOutputStream(filename)); + Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bmp.copyPixelsFromBuffer(buf); + bmp.compress(Bitmap.CompressFormat.PNG, 90, bos); + bmp.recycle(); + } finally { + if (bos != null) bos.close(); + } + Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'"); + } + + public Bitmap getFrameBitmap() { + if (!mEglCore.isCurrent(mEGLSurface)) { + throw new RuntimeException("Expected EGL context/surface is not current"); + } + + // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA + // data (i.e. a byte of red, followed by a byte of green...). 
While the Bitmap + // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the + // Bitmap "copy pixels" method wants the same format GL provides. + // + // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling + // here often. + // + // Making this even more interesting is the upside-down nature of GL, which means + // our output will look upside down relative to what appears on screen if the + // typical GL conventions are used. + + int width = getWidth(); + int height = getHeight(); + ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4); + buf.order(ByteOrder.LITTLE_ENDIAN); + GLES20.glReadPixels(0, 0, width, height, + GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); + //GlUtil.checkGlError("glReadPixels"); + buf.rewind(); + + Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bmp.copyPixelsFromBuffer(buf); + + Matrix matrix = new Matrix(); + matrix.preScale(1f, -1f); + Bitmap bmp2 = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), matrix, false); + + bmp.recycle(); + bmp = null; + return bmp2; + } +} diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaAudioEncoder.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaAudioEncoder.java new file mode 100644 index 000000000..c6c726123 --- /dev/null +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaAudioEncoder.java @@ -0,0 +1,218 @@ +package jp.co.cyberagent.android.gpuimage.encoder; +/* + * AudioVideoRecordingSample + * Sample project to cature audio and video from internal mic/camera and save as MPEG4 file. + * + * Copyright (c) 2014-2015 saki t_saki@serenegiant.com + * + * File name: MediaAudioEncoder.java + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * All files in the folder are under this Apache License, Version 2.0. +*/ + +import java.io.IOException; +import java.nio.ByteBuffer; + +import android.annotation.TargetApi; +import android.media.AudioFormat; +import android.media.AudioRecord; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaCodecList; +import android.media.MediaFormat; +import android.media.MediaRecorder; +import android.util.Log; + +@TargetApi(18) +public class MediaAudioEncoder extends MediaEncoder { + private static final boolean DEBUG = false; + private static final String TAG = "MediaAudioEncoder"; + + private static final String MIME_TYPE = "audio/mp4a-latm"; + private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices. + private static final int BIT_RATE = 64000; + public static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel + public static final int FRAMES_PER_BUFFER = 25; // AAC, frame/buffer/sec + + private AudioThread mAudioThread = null; + + public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) { + super(muxer, listener); + } + + @Override + protected void prepare() throws IOException { + if (DEBUG) Log.v(TAG, "prepare:"); + mTrackIndex = -1; + mMuxerStarted = mIsEOS = false; + // prepare MediaCodec for AAC encoding of audio data from inernal mic. 
        // --- tail of MediaAudioEncoder#prepare(); the method header is above this chunk ---
        final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
        if (audioCodecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());

        // Mono AAC-LC stream; sample rate and bit rate come from class constants.
        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        //audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
//      audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
//      audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
        if (DEBUG) Log.i(TAG, "format: " + audioFormat);
        mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();
        if (DEBUG) Log.i(TAG, "prepare finishing");
        if (mListener != null) {
            try {
                mListener.onPrepared(this);
            } catch (final Exception e) {
                Log.e(TAG, "prepare:", e);
            }
        }
    }

    @Override
    protected void startRecording() {
        super.startRecording();
        // create and execute audio capturing thread using internal mic
        if (mAudioThread == null) {
            mAudioThread = new AudioThread();
            mAudioThread.start();
        }
    }

    @Override
    protected void release() {
        // Drop the reference; AudioThread exits on its own when mIsCapturing/mRequestStop change.
        mAudioThread = null;
        super.release();
    }

    // Audio sources to try in order; only the internal mic is enabled here.
    private static final int[] AUDIO_SOURCES = new int[] {
        MediaRecorder.AudioSource.MIC,
        //MediaRecorder.AudioSource.DEFAULT,
        //MediaRecorder.AudioSource.CAMCORDER,
        //MediaRecorder.AudioSource.VOICE_COMMUNICATION,
        //MediaRecorder.AudioSource.VOICE_RECOGNITION,
    };

    /**
     * Thread to capture audio data from internal mic as uncompressed 16bit PCM data
     * and write them to the MediaCodec encoder
     */
    private class AudioThread extends Thread {
        @Override
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
            try {
                final int min_buffer_size = AudioRecord.getMinBufferSize(
                        SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
                        AudioFormat.ENCODING_PCM_16BIT);
                // Round the frame-based buffer size up to a multiple of SAMPLES_PER_FRAME
                // that is at least the platform minimum.
                int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
                if (buffer_size < min_buffer_size)
                    buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;

                AudioRecord audioRecord = null;
                // Try each candidate source until one initializes successfully.
                for (final int source : AUDIO_SOURCES) {
                    try {
                        audioRecord = new AudioRecord(
                                source, SAMPLE_RATE,
                                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
                        if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
                            audioRecord = null;
                    } catch (final Exception e) {
                        audioRecord = null;
                    }
                    if (audioRecord != null) break;
                }
                if (audioRecord != null) {
                    try {
                        if (mIsCapturing) {
                            if (DEBUG) Log.v(TAG, "AudioThread:start audio recording");
                            final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
                            int readBytes;
                            int errorCount = 0;
                            audioRecord.startRecording();
                            try {
                                for (; mIsCapturing && !mRequestStop && !mIsEOS ;) {
                                    // read audio data from internal mic
                                    buf.clear();
                                    readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
                                    if (readBytes > 0) {
                                        // set audio data to encoder
                                        buf.position(readBytes);
                                        buf.flip();
                                        encode(buf, readBytes, getPTSUs());
                                        frameAvailableSoon();
                                    } else {
                                        // Three failed reads in total => flag input error so the
                                        // encoder thread can drop this track and keep the muxer alive.
                                        errorCount++;
                                        if (errorCount >= 3) {
                                            Log.e(TAG, "audio recorder error..");
                                            mInputError = true;
                                            break;
                                        }
                                    }
                                }
                                frameAvailableSoon();
                            } finally {
                                audioRecord.stop();
                            }
                        }
                    } finally {
                        audioRecord.release();
                    }
                } else {
                    Log.e(TAG, "failed to initialize AudioRecord");
                }
            } catch (final Exception e) {
                Log.e(TAG, "AudioThread#run", e);
            }
            if (DEBUG) Log.v(TAG, "AudioThread:finished");
        }
    }

    /**
     * select the first codec that match a specific MIME type
     * @param mimeType
     * @return the first matching encoder, or null if none is available
     */
    private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
        if (DEBUG) Log.v(TAG, "selectAudioCodec:");

        MediaCodecInfo result = null;
        // get the list of available codecs
        final int numCodecs = MediaCodecList.getCodecCount();
        LOOP:   for (int i = 0; i < numCodecs; i++) {
            final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {   // skip decoders
                continue;
            }
            final String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
                if (types[j].equalsIgnoreCase(mimeType)) {
                    if (result == null) {   // always true here; kept for parity with the original
                        result = codecInfo;
                        break LOOP;
                    }
                }
            }
        }
        return result;
    }

}
diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaEncoder.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaEncoder.java
new file mode 100644
index 000000000..2aad6327a
--- /dev/null
+++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaEncoder.java
@@ -0,0 +1,430 @@
package jp.co.cyberagent.android.gpuimage.encoder;
/*
 * AudioVideoRecordingSample
 * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
 *
 * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
 *
 * File name: MediaEncoder.java
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * All files in the folder are under this Apache License, Version 2.0.
*/

import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Base class shared by the audio and video encoders: owns the MediaCodec
 * instance, runs the drain loop on a private thread, and feeds encoded
 * samples to the shared MediaMuxerWrapper.
 */
@TargetApi(18)
public abstract class MediaEncoder implements Runnable {
    private static final boolean DEBUG = false;
    private static final String TAG = "MediaEncoder";

    protected static final int TIMEOUT_USEC = 10000;    // 10[msec]
    protected static final int MSG_FRAME_AVAILABLE = 1;
    protected static final int MSG_STOP_RECORDING = 9;

    // Callbacks fired from the encoder thread (wrap in try/catch at call sites).
    public interface MediaEncoderListener {
        void onPrepared(MediaEncoder encoder);
        void onStopped(MediaEncoder encoder);
        void onMuxerStopped();
    }

    protected final Object mSync = new Object();
    /**
     * Flag that indicates this encoder is capturing now.
     */
    protected volatile boolean mIsCapturing;
    /**
     * Count of pending drain requests (incremented by frameAvailableSoon).
     */
    private int mRequestDrain;
    /**
     * Flag to request stop capturing
     */
    protected volatile boolean mRequestStop;
    /**
     * Flag that indicates the encoder received EOS (End Of Stream)
     */
    protected boolean mIsEOS;
    /**
     * Flag that indicates the muxer is running
     */
    protected boolean mMuxerStarted;
    /**
     * Track Number
     */
    protected int mTrackIndex;
    /**
     * MediaCodec instance for encoding
     */
    protected MediaCodec mMediaCodec;               // API >= 16(Android4.1.2)
    /**
     * Reference to the MediaMuxerWrapper instance (cleared in release()).
     */
    protected MediaMuxerWrapper mMuxer;
    /**
     * BufferInfo instance for dequeuing
     */
    private MediaCodec.BufferInfo mBufferInfo;      // API >= 16(Android4.1.2)

    protected final MediaEncoderListener mListener;

    // Set by subclasses (e.g. AudioThread) when input capture fails repeatedly.
    boolean mInputError = false;

    public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
        if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
        if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
        mMuxer = muxer;
        muxer.addEncoder(this);
        mListener = listener;
        synchronized (mSync) {
            // create BufferInfo here for effectiveness (to reduce GC)
            mBufferInfo = new MediaCodec.BufferInfo();
            // wait for starting thread
            new Thread(this, getClass().getSimpleName()).start();
            try {
                // NOTE(review): bare wait() without a condition loop — run() notifies once it
                // has started; a spurious wakeup would let the constructor return early. Confirm.
                mSync.wait();
            } catch (final InterruptedException e) {
            }
        }
    }

    public String getOutputPath() {
        final MediaMuxerWrapper muxer = mMuxer;
        return muxer != null ? muxer.getOutputPath() : null;
    }

    /**
     * the method to indicate frame data is soon available or already available
     * @return return true if encoder is ready to encode.
     */
    public boolean frameAvailableSoon() {
//      if (DEBUG) Log.v(TAG, "frameAvailableSoon");
        synchronized (mSync) {
            if (!mIsCapturing || mRequestStop) {
                return false;
            }
            mRequestDrain++;
            mSync.notifyAll();
        }
        return true;
    }

    /**
     * encoding loop on private thread
     */
    @Override
    public void run() {
//      android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
        synchronized (mSync) {
            mRequestStop = false;
            mRequestDrain = 0;
            // unblock the constructor, which is waiting on mSync
            mSync.notify();
        }
        final boolean isRunning = true;
        boolean localRequestStop;
        boolean localRequestDrain;
        while (isRunning) {
            // Snapshot the request flags under the lock, then act outside it.
            synchronized (mSync) {
                localRequestStop = mRequestStop;
                localRequestDrain = (mRequestDrain > 0);
                if (localRequestDrain)
                    mRequestDrain--;
            }

            if (mInputError) {
                // Input device failed: detach this track from the muxer and shut down.
                inputError();
                release();
                break;
            }

            if (localRequestStop) {
                drain();
                // request stop recording
                signalEndOfInputStream();
                // process output data again for EOS signal
                drain();
                // release all related objects
                release();
                break;
            }

            if (localRequestDrain) {
                drain();
            } else {
                // No work pending: sleep until frameAvailableSoon()/stopRecording() notifies.
                synchronized (mSync) {
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        break;
                    }
                }
            }
        } // end of while
        if (DEBUG) Log.d(TAG, "Encoder thread exiting");
        synchronized (mSync) {
            mRequestStop = true;
            mIsCapturing = false;
        }
    }

    /*
     * preparing method for each sub class
     * this method should be implemented in sub class, so set this as abstract method
     * @throws IOException
     */
    /*package*/ abstract void prepare() throws IOException;

    /*package*/ void startRecording() {
        if (DEBUG) Log.v(TAG, "startRecording");
        synchronized (mSync) {
            mIsCapturing = true;
            mRequestStop = false;
            mSync.notifyAll();
        }
    }

    /**
     * the method to request stop encoding
     */
    /*package*/ void stopRecording() {
        if (DEBUG) Log.v(TAG, "stopRecording");
        synchronized (mSync) {
            if (!mIsCapturing || mRequestStop) {
                return;
            }
            mRequestStop = true;    // for rejecting newer frames
            mSync.notifyAll();
            // We can not know when the encoding and writing finish.
            // so we return immediately after request to avoid delay of caller thread
        }
    }

//********************************************************************************
//********************************************************************************
    /**
     * Release all related objects
     */
    protected void release() {
        if (DEBUG) Log.d(TAG, "release:");
        try {
            mListener.onStopped(this);
        } catch (final Exception e) {
            Log.e(TAG, "failed onStopped", e);
        }
        mIsCapturing = false;
        if (mMediaCodec != null) {
            try {
                mMediaCodec.stop();
                mMediaCodec.release();
                mMediaCodec = null;
            } catch (final Exception e) {
                Log.e(TAG, "failed releasing MediaCodec", e);
            }
        }
        if (mMuxerStarted) {
            final MediaMuxerWrapper muxer = mMuxer;
            if (muxer != null) {
                try {
                    // muxer.stop() only returns true once the last encoder has stopped.
                    if (muxer.stop()) {
                        mListener.onMuxerStopped();
                    }
                } catch (final Exception e) {
                    Log.e(TAG, "failed stopping muxer", e);
                }
            }
        }
        mBufferInfo = null;
        mMuxer = null;
    }

    protected void signalEndOfInputStream() {
        if (DEBUG) Log.d(TAG, "sending EOS to encoder");
        // signalEndOfInputStream is only available for video encoding with surface
        // and is equivalent to sending an empty buffer with BUFFER_FLAG_END_OF_STREAM flag.
//      mMediaCodec.signalEndOfInputStream();   // API >= 18
        encode(null, 0, getPTSUs());
    }

    /**
     * Method to set byte array to the MediaCodec encoder
     * @param buffer
     * @param length length of byte array, zero means EOS.
     * @param presentationTimeUs
     */
    protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
        if (!mIsCapturing) return;
        // Legacy (pre-API 21) buffer access path; required on API 18.
        final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
        while (mIsCapturing) {
            final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufferIndex >= 0) {
                final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                if (buffer != null) {
                    inputBuffer.put(buffer);
                }
//              if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
                if (length <= 0) {
                    // send EOS
                    mIsEOS = true;
                    if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                            presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    break;
                } else {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
                            presentationTimeUs, 0);
                }
                break;
            } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // wait for MediaCodec encoder to be ready to encode
                // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
                // will wait for maximum TIMEOUT_USEC(10msec) on each call
            }
        }
    }

    /**
     * drain encoded data and write them to muxer
     */
    protected void drain() {
        if (mMediaCodec == null) return;
        ByteBuffer[] encoderOutputBuffers = null;
        try {
            encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        } catch (IllegalStateException e) {
            Log.e(TAG, " mMediaCodec.getOutputBuffers() error");
            return;
        }

        int encoderStatus, count = 0;
        final MediaMuxerWrapper muxer = mMuxer;
        if (muxer == null) {
//          throw new NullPointerException("muxer is unexpectedly null");
            Log.w(TAG, "muxer is unexpectedly null");
            return;
        }
        LOOP:   while (mIsCapturing) {
            // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
            try {
                encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            } catch (IllegalStateException e) {
                encoderStatus = MediaCodec.INFO_TRY_AGAIN_LATER;
            }
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // wait 5 counts (=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
                if (!mIsEOS) {
                    if (++count > 5)
                        break LOOP;     // out of while
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                // this should not come when encoding
                encoderOutputBuffers = mMediaCodec.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
                // this status indicates the output format of the codec changed
                // this should come only once before actual encoded data
                // but this status never comes on Android4.3 or less
                // and in that case, you should treat when MediaCodec.BUFFER_FLAG_CODEC_CONFIG comes.
                if (mMuxerStarted) {    // second time request is error
                    throw new RuntimeException("format changed twice");
                }
                // get output format from codec and pass it to muxer
                // getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED otherwise crash.
                final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
                mTrackIndex = muxer.addTrack(format);
                mMuxerStarted = true;
                if (!muxer.start()) {
                    // we should wait until the muxer is ready (other track not added yet)
                    synchronized (muxer) {
                        while (!muxer.isStarted())
                            try {
                                muxer.wait(100);
                            } catch (final InterruptedException e) {
                                break LOOP;
                            }
                    }
                }
            } else if (encoderStatus < 0) {
                // unexpected status
                if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
            } else {
                final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    // this never should come...may be a MediaCodec internal error
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // You should set output format to muxer here when you target Android4.3 or less
                    // but MediaCodec#getOutputFormat can not be called here (because INFO_OUTPUT_FORMAT_CHANGED didn't come yet)
                    // therefore we should expand and prepare output format from buffer data.
                    // This sample is for API>=18(>=Android 4.3), just ignore this flag here
                    if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    // encoded data is ready, clear waiting counter
                    count = 0;
                    if (!mMuxerStarted) {
                        // muxer is not ready...this will be a programming failure.
                        throw new RuntimeException("drain:muxer hasn't started");
                    }
                    // write encoded data to muxer (need to adjust presentationTimeUs).
                    mBufferInfo.presentationTimeUs = getPTSUs();
                    muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    prevOutputPTSUs = mBufferInfo.presentationTimeUs;
                }
                // return buffer to encoder
                mMediaCodec.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    // when EOS comes.
                    mIsCapturing = false;
                    break;      // out of while
                }
            }
        }
    }

    /**
     * previous presentationTimeUs for writing
     */
    private long prevOutputPTSUs = 0;
    /**
     * get next encoding presentationTimeUs
     * @return monotonic timestamp in microseconds
     */
    protected long getPTSUs() {
        long result = System.nanoTime() / 1000L;
        // presentationTimeUs should be monotonic
        // otherwise muxer fails to write
        if (result < prevOutputPTSUs)
            // (prevOutputPTSUs - result) + result == prevOutputPTSUs: clamp to the previous timestamp
            result = (prevOutputPTSUs - result) + result;
        return result;
    }

    protected void inputError() {
        // Detach this (failed) track so the remaining encoder can still start the muxer.
        final MediaMuxerWrapper muxer = mMuxer;
        if (muxer != null) {
            muxer.removeFailEncoder();
        }
    }

}
diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaMuxerWrapper.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaMuxerWrapper.java
new file mode 100644
index 000000000..033025ebc
--- /dev/null
+++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaMuxerWrapper.java
@@ -0,0 +1,200 @@
package jp.co.cyberagent.android.gpuimage.encoder;
/*
 * AudioVideoRecordingSample
 * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
 *
 * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
 *
 * File name: MediaMuxerWrapper.java
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * All files in the folder are under this Apache License, Version 2.0.
+*/ + +import android.annotation.TargetApi; +import android.media.MediaCodec; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.util.Log; + +import java.io.IOException; +import java.nio.ByteBuffer; + +@TargetApi(18) +public class MediaMuxerWrapper { + private static final boolean DEBUG = false; + private static final String TAG = "MediaMuxerWrapper"; + + //private static final String DIR_NAME = "AVRecSample"; + //private static final SimpleDateFormat mDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US); + + private String mOutputPath; + private final MediaMuxer mMediaMuxer; // API >= 18 + private int mEncoderCount, mStatredCount; + private boolean mIsStarted; + private MediaEncoder mVideoEncoder, mAudioEncoder; + + public MediaMuxerWrapper(String outputPath) throws IOException { + mOutputPath = outputPath; + mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + mEncoderCount = mStatredCount = 0; + mIsStarted = false; + } + + public String getOutputPath() { + return mOutputPath; + } + + public void prepare() throws IOException { + if (mVideoEncoder != null) + mVideoEncoder.prepare(); + if (mAudioEncoder != null) + mAudioEncoder.prepare(); + } + + public void startRecording() { + if (mVideoEncoder != null) + mVideoEncoder.startRecording(); + if (mAudioEncoder != null) + mAudioEncoder.startRecording(); + } + + public void stopRecording() { + if (mVideoEncoder != null) + mVideoEncoder.stopRecording(); + mVideoEncoder = null; + if (mAudioEncoder != null) + mAudioEncoder.stopRecording(); + mAudioEncoder = null; + } + + public synchronized boolean isStarted() { + return mIsStarted; + } + +//********************************************************************** +//********************************************************************** + /** + * assign encoder to this calss. this is called from encoder. 
+ * @param encoder instance of MediaVideoEncoder or MediaAudioEncoder + */ + /*package*/ void addEncoder(final MediaEncoder encoder) { + if (encoder instanceof MediaVideoEncoder) { + if (mVideoEncoder != null) + throw new IllegalArgumentException("Video encoder already added."); + mVideoEncoder = encoder; + } else if (encoder instanceof MediaAudioEncoder) { + if (mAudioEncoder != null) + throw new IllegalArgumentException("Video encoder already added."); + mAudioEncoder = encoder; + } else + throw new IllegalArgumentException("unsupported encoder"); + mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0); + } + + /** + * request start recording from encoder + * @return true when muxer is ready to write + */ + /*package*/ synchronized boolean start() { + if (DEBUG) Log.v(TAG, "start:"); + mStatredCount++; + if ((mEncoderCount > 0) && (mStatredCount == mEncoderCount)) { + mMediaMuxer.start(); + mIsStarted = true; + notifyAll(); + if (DEBUG) Log.v(TAG, "MediaMuxer started:"); + } + return mIsStarted; + } + + /** + * request stop recording from encoder when encoder received EOS + */ + /*package*/ synchronized boolean stop() { + if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStatredCount); + mStatredCount--; + if ((mEncoderCount > 0) && (mStatredCount <= 0)) { + mMediaMuxer.stop(); + mMediaMuxer.release(); + mIsStarted = false; + if (DEBUG) Log.v(TAG, "MediaMuxer stopped:"); + return true; + } + return false; + } + + /** + * assign encoder to muxer + * @param format + * @return minus value indicate error + */ + /*package*/ synchronized int addTrack(final MediaFormat format) { + if (mIsStarted) + throw new IllegalStateException("muxer already started"); + final int trackIx = mMediaMuxer.addTrack(format); + if (DEBUG) Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format); + return trackIx; + } + + /** + * write encoded data to muxer + * @param trackIndex + * @param byteBuf + * @param bufferInfo + */ + 
/*package*/ synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) { + if (mStatredCount > 0) + mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo); + } + + synchronized void removeFailEncoder() { + mEncoderCount--; + + if ((mEncoderCount > 0) && (mStatredCount == mEncoderCount)) { + mMediaMuxer.start(); + mIsStarted = true; + notifyAll(); + if (DEBUG) Log.v(TAG, "MediaMuxer force start"); + } + } + +//********************************************************************** +//********************************************************************** + /** + * generate output file + * @param type Environment.DIRECTORY_MOVIES / Environment.DIRECTORY_DCIM etc. + * @param ext .mp4(.m4a for audio) or .png + * @return return null when this app has no writing permission to external storage. + */ + /*public static final File getCaptureFile(final String type, final String ext) { + final File dir = new File(Environment.getExternalStoragePublicDirectory(type), DIR_NAME); + Log.d(TAG, "path=" + dir.toString()); + dir.mkdirs(); + if (dir.canWrite()) { + return new File(dir, getDateTimeString() + ext); + } + return null; + }*/ + + /** + * get current date and time as String + * @return + */ + /*private static final String getDateTimeString() { + final GregorianCalendar now = new GregorianCalendar(); + return mDateTimeFormat.format(now.getTime()); + }*/ + +} diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaVideoEncoder.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaVideoEncoder.java new file mode 100644 index 000000000..0c5fff8da --- /dev/null +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/MediaVideoEncoder.java @@ -0,0 +1,240 @@ +package jp.co.cyberagent.android.gpuimage.encoder; +/* + * AudioVideoRecordingSample + * Sample project to cature audio and video from internal mic/camera and save as MPEG4 file. 
+ * + * Copyright (c) 2014-2015 saki t_saki@serenegiant.com + * + * File name: MediaVideoEncoder.java + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * All files in the folder are under this Apache License, Version 2.0. +*/ + +import android.annotation.TargetApi; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaCodecList; +import android.media.MediaFormat; +import android.opengl.EGLContext; +import android.util.Log; +import android.view.Surface; + +import java.io.IOException; + +@TargetApi(18) +public class MediaVideoEncoder extends MediaEncoder { + private static final boolean DEBUG = false; + private static final String TAG = "MediaVideoEncoder"; + + private static final String MIME_TYPE = "video/avc"; + // parameters for recording + private static final int FRAME_RATE = 25; + private static final float BPP = 0.25f; + //private static final float BPP_LOW = 0.15f; + + private final int mWidth; + private final int mHeight; + // private RenderHandler mRenderHandler; + private Surface mSurface; + + public MediaVideoEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener, + final int width, final int height) { + super(muxer, listener); + if (DEBUG) Log.i(TAG, "MediaVideoEncoder: "); + mWidth = width; + mHeight = height; + // mRenderHandler = RenderHandler.createHandler(TAG); + } + + public boolean frameAvailableSoon(final float[] tex_matrix) { + boolean result; + if (result = 
super.frameAvailableSoon()) { + // mRenderHandler.draw(tex_matrix); + } + return result; + } + + @Override + public boolean frameAvailableSoon() { + boolean result; + if (result = super.frameAvailableSoon()) { + // mRenderHandler.draw(null); + } + return result; + } + + @Override + protected void prepare() throws IOException { + if (DEBUG) Log.i(TAG, "prepare: "); + mTrackIndex = -1; + mMuxerStarted = mIsEOS = false; + + final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE); + if (videoCodecInfo == null) { + Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE); + return; + } + if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName()); + + final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight); + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18 + format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate()); + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10); + if (DEBUG) Log.i(TAG, "format: " + format); + + mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE); + mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + // get Surface for encoder input + // this method only can call between #configure and #start + mSurface = mMediaCodec.createInputSurface(); // API >= 18 + mMediaCodec.start(); + if (DEBUG) Log.i(TAG, "prepare finishing"); + if (mListener != null) { + try { + mListener.onPrepared(this); + } catch (final Exception e) { + Log.e(TAG, "prepare:", e); + } + } + } + + public void setEglContext(final EGLContext shared_context, + final int tex_id) { + // mRenderHandler.setEglContext(shared_context, tex_id, mSurface, true); + } + + public Surface getSurface() { + return mSurface; + } + + @Override + protected void release() { + if (DEBUG) Log.i(TAG, "release:"); + if (mSurface != null) { + mSurface.release(); + mSurface = null; + } + // if 
(mRenderHandler != null) { + // mRenderHandler.release(); + // mRenderHandler = null; + // } + super.release(); + } + + private int calcBitRate() { + /*float bpp = BPP; + if (!"US".equalsIgnoreCase(LocateManager.getInstance().getCountry())) { + bpp = BPP_LOW; + }*/ + final int bitrate = (int)(BPP * FRAME_RATE * mWidth * mHeight); + Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f)); + return bitrate; + } + + /** + * select the first codec that match a specific MIME type + * @param mimeType + * @return null if no codec matched + */ + protected static final MediaCodecInfo selectVideoCodec(final String mimeType) { + if (DEBUG) Log.v(TAG, "selectVideoCodec:"); + + // get the list of available codecs + final int numCodecs = MediaCodecList.getCodecCount(); + for (int i = 0; i < numCodecs; i++) { + final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); + + if (!codecInfo.isEncoder()) { // skipp decoder + continue; + } + // select first codec that match a specific MIME type and color format + final String[] types = codecInfo.getSupportedTypes(); + for (int j = 0; j < types.length; j++) { + if (types[j].equalsIgnoreCase(mimeType)) { + if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]); + final int format = selectColorFormat(codecInfo, mimeType); + if (format > 0) { + return codecInfo; + } + } + } + } + return null; + } + + /** + * select color format available on specific codec and we can use. 
+ * @return 0 if no colorFormat is matched + */ + protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) { + if (DEBUG) Log.i(TAG, "selectColorFormat: "); + int result = 0; + final MediaCodecInfo.CodecCapabilities caps; + try { + Thread.currentThread().setPriority(Thread.MAX_PRIORITY); + caps = codecInfo.getCapabilitiesForType(mimeType); + } finally { + Thread.currentThread().setPriority(Thread.NORM_PRIORITY); + } + int colorFormat; + for (int i = 0; i < caps.colorFormats.length; i++) { + colorFormat = caps.colorFormats[i]; + if (isRecognizedViewoFormat(colorFormat)) { + if (result == 0) + result = colorFormat; + break; + } + } + if (result == 0) + Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType); + return result; + } + + /** + * color formats that we can use in this class + */ + protected static int[] recognizedFormats; + static { + recognizedFormats = new int[] { + MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface, +// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar, +// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, +// MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, + + }; + } + + private static final boolean isRecognizedViewoFormat(final int colorFormat) { + if (DEBUG) Log.i(TAG, "isRecognizedViewoFormat:colorFormat=" + colorFormat); + final int n = recognizedFormats != null ? 
recognizedFormats.length : 0; + for (int i = 0; i < n; i++) { + if (recognizedFormats[i] == colorFormat) { + return true; + } + } + return false; + } + + @Override + protected void signalEndOfInputStream() { + if (DEBUG) Log.d(TAG, "sending EOS to encoder"); + if (mMediaCodec != null) { + mMediaCodec.signalEndOfInputStream(); // API >= 18 + } + mIsEOS = true; + } + +} diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/WindowSurface.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/WindowSurface.java new file mode 100644 index 000000000..b3d013c19 --- /dev/null +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/encoder/WindowSurface.java @@ -0,0 +1,92 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package jp.co.cyberagent.android.gpuimage.encoder; + +import android.annotation.TargetApi; +import android.graphics.SurfaceTexture; +import android.view.Surface; + +/** + * Recordable EGL window surface. + *

 * It's good practice to explicitly release() the surface, preferably from a "finally" block.
 */
@TargetApi(18)
public class WindowSurface extends EglSurfaceBase {
    private Surface mSurface;
    private boolean mReleaseSurface;

    /**
     * Associates an EGL surface with the native window surface.
     * <p>
     * Set releaseSurface to true if you want the Surface to be released when release() is
     * called.  This is convenient, but can interfere with framework classes that expect to
     * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
     * surfaceDestroyed() callback won't fire).
     */
    public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
        super(eglCore);
        createWindowSurface(surface);
        mSurface = surface;
        mReleaseSurface = releaseSurface;
    }

    /**
     * Associates an EGL surface with the SurfaceTexture.
     */
    public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
        super(eglCore);
        createWindowSurface(surfaceTexture);
    }

    /**
     * Releases any resources associated with the EGL surface (and, if configured to do so,
     * with the Surface as well).
     * <p>
     * Does not require that the surface's EGL context be current.
     */
    public void release() {
        releaseEglSurface();
        if (mSurface != null) {
            if (mReleaseSurface) {
                mSurface.release();
            }
            mSurface = null;
        }
    }

    /**
     * Recreate the EGLSurface, using the new EglBase.  The caller should have already
     * freed the old EGLSurface with releaseEglSurface().
     * <p>
     * This is useful when we want to update the EGLSurface associated with a Surface.
     * For example, if we want to share with a different EGLContext, which can only
     * be done by tearing down and recreating the context.  (That's handled by the caller;
     * this just creates a new EGLSurface for the Surface we were handed earlier.)
     * <p>
+ * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a + * context somewhere, the create call will fail with complaints from the Surface + * about already being connected. + */ + public void recreate(EglCore newEglCore) { + if (mSurface == null) { + throw new RuntimeException("not yet implemented for SurfaceTexture"); + } + mEglCore = newEglCore; // switch to new context + createWindowSurface(mSurface); // create new surface + } +} diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilter.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilter.java index 6139c4a11..4e6042e1b 100644 --- a/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilter.java +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilter.java @@ -70,7 +70,7 @@ public GPUImageFilter(final String vertexShader, final String fragmentShader) { this.fragmentShader = fragmentShader; } - private final void init() { + private void init() { onInit(); onInitialized(); } diff --git a/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilterGroup.java b/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilterGroup.java index 8735dbcdc..37b831012 100644 --- a/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilterGroup.java +++ b/library/src/main/java/jp/co/cyberagent/android/gpuimage/filter/GPUImageFilterGroup.java @@ -91,6 +91,14 @@ public void addFilter(GPUImageFilter aFilter) { updateMergedFilters(); } + public void addFilter(int index, GPUImageFilter aFilter) { + if (aFilter == null) { + return; + } + filters.add(index, aFilter); + updateMergedFilters(); + } + /* * (non-Javadoc) * @see jp.co.cyberagent.android.gpuimage.filter.GPUImageFilter#onInit() diff --git a/sample/build.gradle b/sample/build.gradle index ed3d3249c..138f48565 100644 --- a/sample/build.gradle +++ b/sample/build.gradle @@ -1,6 +1,6 @@ apply 
plugin: 'com.android.application' -apply plugin: 'kotlin-android-extensions' apply plugin: 'kotlin-android' +apply plugin: 'kotlin-android-extensions' android { compileSdkVersion COMPILE_SDK_VERSION as int diff --git a/sample/src/main/AndroidManifest.xml b/sample/src/main/AndroidManifest.xml index a45d39b0d..f29dd66b6 100644 --- a/sample/src/main/AndroidManifest.xml +++ b/sample/src/main/AndroidManifest.xml @@ -3,6 +3,8 @@ package="jp.co.cyberagent.android.gpuimage.sample"> + + diff --git a/sample/src/main/java/jp/co/cyberagent/android/gpuimage/sample/activity/CameraActivity.kt b/sample/src/main/java/jp/co/cyberagent/android/gpuimage/sample/activity/CameraActivity.kt index b40cd350d..4fbe1ed63 100644 --- a/sample/src/main/java/jp/co/cyberagent/android/gpuimage/sample/activity/CameraActivity.kt +++ b/sample/src/main/java/jp/co/cyberagent/android/gpuimage/sample/activity/CameraActivity.kt @@ -18,13 +18,17 @@ package jp.co.cyberagent.android.gpuimage.sample.activity import android.os.Build import android.os.Bundle +import android.os.Environment import android.view.View +import android.widget.Button import android.widget.SeekBar import android.widget.SeekBar.OnSeekBarChangeListener import android.widget.Toast import androidx.appcompat.app.AppCompatActivity +import jp.co.cyberagent.android.gpuimage.GPUImageMovieWriter import jp.co.cyberagent.android.gpuimage.GPUImageView import jp.co.cyberagent.android.gpuimage.filter.GPUImageFilter +import jp.co.cyberagent.android.gpuimage.filter.GPUImageFilterGroup import jp.co.cyberagent.android.gpuimage.sample.GPUImageFilterTools import jp.co.cyberagent.android.gpuimage.sample.GPUImageFilterTools.FilterAdjuster import jp.co.cyberagent.android.gpuimage.sample.R @@ -38,6 +42,7 @@ class CameraActivity : AppCompatActivity() { private val gpuImageView: GPUImageView by lazy { findViewById(R.id.surfaceView) } private val seekBar: SeekBar by lazy { findViewById(R.id.seekBar) } + private val recordBtn: Button by lazy { findViewById