Commit 9b011401 by Lee Munkyeong

Merge branch 'communication/feature/develop_encode_video' into 'communication/develop'

Communication/feature/develop encode video

See merge request !165
parents ae514ab7 a8183c7d
@@ -3,7 +3,7 @@
-dontskipnonpubliclibraryclasses
-dontpreverify
-verbose
-optimizations !code/simplification/arithmetic,!field/*,!class/merging/*
-optimizations !code/simplification/arithmetic,!field/*,!class/merging/*,!code/removal/advanced,!method/inlining/short,!method/inlining/unique,!method/removal/*,!method/marking/*
-keep public class * extends android.app.Activity
-keep public class * extends android.app.Application
@@ -14,6 +14,7 @@ import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.ConnectivityManager;
@@ -26,6 +27,8 @@ import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.BaseColumns;
import android.provider.MediaStore;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.KeyEvent;
@@ -101,6 +104,7 @@ import jp.agentec.abook.abv.ui.common.dialog.ABookAlertDialog;
import jp.agentec.abook.abv.ui.common.util.ABVToastUtil;
import jp.agentec.abook.abv.ui.common.util.AlertDialogUtil;
import jp.agentec.abook.abv.ui.home.helper.ActivityHandlingHelper;
import jp.agentec.abook.abv.ui.home.helper.VideoEncoder;
import jp.agentec.abook.abv.ui.home.view.FullscreenableChromeClient;
import jp.agentec.abook.abv.ui.viewer.activity.CommunicationWebViewActivity;
import jp.agentec.abook.abv.ui.viewer.activity.ParentWebViewActivity;
@@ -120,7 +124,6 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
private final String TAG = "ChatWebViewActivity";
private final String NETWORK_ERROR_PLACE_HOLDER = "file:///android_asset/chat/public_new/chat.html";
//private final String NETWORK_ERROR_PLACE_HOLDER = "file:///android_asset/chat/public/networkError.html";
private final String CHAT_PAGE_URL = "file:///android_asset/chat/public_new/chat.html";
private final String CHAT_ROOM_PAGE_URL = "file:///android_asset/chat/public_new/chat_room.html";
private final String COLLABORATION_PAGE_URL = "file:///android_asset/chat/public_new/collaboration.html";
@@ -144,6 +147,8 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
private String mSkey;
private Integer joinMeetingId;
private boolean isPIP;
private String encodedFilePath;
private Uri encodedVideoPath;
// 0: create collaboration, 1: join collaboration
private Integer collaborationJoinFlg = 0;
@@ -539,7 +544,7 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
File file = new File(filePath);
NetworkTask networkTask = new NetworkTask(url, filePath);
networkTask.execute();
} else if (url.contains("/file/getImage")) {
} else if (url.contains("/file/getImage") && !url.contains(".mp4")) {
Uri uri = Uri.parse(url);
String fileName = uri.getQueryParameter("fileName");
String filePath = getFilesDir().getAbsolutePath() + "/" + fileName;
@@ -785,11 +790,80 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
if (mUploadMessage == null) {
return;
}
mUploadMessage.onReceiveValue(result);
mChatWebView.post(new Runnable() {
@Override
public void run() {
mChatWebView.loadUrl("javascript:CHAT_UI.showLoadingIndicator();");
}
});
if (dataUri != null) {
Cursor cursor = getContentResolver().query(dataUri, null, null, null, null);
cursor.moveToFirst();
final String filePath = cursor.getString(cursor.getColumnIndex(MediaStore.MediaColumns.DATA));
cursor.close();
Runnable r = new Runnable() {
@Override
public void run() {
try {
// Callback fired by VideoEncoder when encoding completes; notify the chat UI on the main thread.
Runnable callBack = new Runnable() {
@Override
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
mChatWebView.loadUrl("javascript:CHAT_UI.videoEncodeEnd('" + encodedFilePath + "')");
}
});
}
};
File file = new File(filePath);
encodedFilePath = new VideoEncoder().changeResolution(file, callBack);
} catch (Throwable throwable) {
mChatWebView.loadUrl("javascript:CHAT_UI.videoEncodeFail();");
}
}
};
r.run();
mUploadMessage.onReceiveValue(null);
}
}
mUploadMessage = null;
}
public static Uri getImageContentUri(Context context, File file) {
if (!file.exists()) {
return null;
}
String filePath = file.getAbsolutePath();
// Look the file up in the MediaStore; despite the method name, this resolves
// video content URIs, so query the video table rather than the image table.
Cursor cursor = context.getContentResolver().query(
MediaStore.Video.Media.EXTERNAL_CONTENT_URI,
new String[] {BaseColumns._ID},
MediaStore.MediaColumns.DATA + "=? ",
new String[] { filePath }, null);
if (cursor != null && cursor.moveToFirst()) {
int id = cursor.getInt(cursor.getColumnIndex(BaseColumns._ID));
cursor.close();
return Uri.withAppendedPath(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, String.valueOf(id));
} else {
if (cursor != null) {
cursor.close();
}
ContentValues values = new ContentValues();
values.put(MediaStore.MediaColumns.DATA, filePath);
return context.getContentResolver().insert(
MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
}
}
/**
* Checks whether any unread push messages exist.
* @param pushMessageDtoList the list to check
@@ -1501,6 +1575,14 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
public int getAndroidVersion() {
return Build.VERSION.SDK_INT;
}
@JavascriptInterface
public void removeEncodedVideo(String filePath) {
File encodedVideo = new File(filePath);
if (encodedVideo.exists()) {
encodedVideo.delete();
}
}
}
package jp.agentec.abook.abv.ui.home.helper;
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.opengl.EGL14;
import android.opengl.EGLExt;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;
/**
* Holds state associated with a Surface used for MediaCodec encoder input.
* <p>
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
* to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
* to the video encoder.
*/
class InputSurface {
private static final String TAG = "InputSurface";
private static final boolean VERBOSE = true;
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private static final int EGL_OPENGL_ES2_BIT = 4;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private Surface mSurface;
/**
* Creates an InputSurface from a Surface.
*/
public InputSurface(Surface surface) {
if (surface == null) {
throw new NullPointerException();
}
mSurface = surface;
eglSetup();
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
mEGLDisplay = null;
throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
// to be able to tell if the frame is reasonable.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
attrib_list, 0);
checkEglError("eglCreateContext");
if (mEGLContext == null) {
throw new RuntimeException("null context");
}
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
if (mEGLSurface == null) {
throw new RuntimeException("surface was null");
}
}
/**
* Discard all resources held by this class, notably the EGL context. Also releases the
* Surface that was passed to our constructor.
*/
public void release() {
if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
// Clear the current context and surface to ensure they are discarded immediately.
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
}
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
//EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
// null everything out so future attempts to use this object will cause an NPE
mEGLDisplay = null;
mEGLContext = null;
mEGLSurface = null;
mSurface = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*/
public boolean swapBuffers() {
return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
}
/**
* Returns the Surface that the MediaCodec receives buffers from.
*/
public Surface getSurface() {
return mSurface;
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
}
/**
* Checks for EGL errors.
*/
private void checkEglError(String msg) {
boolean failed = false;
int error;
while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
failed = true;
}
if (failed) {
throw new RuntimeException("EGL error encountered (see log)");
}
}
}
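// Usage sketch (illustrative, not part of this change): InputSurface wraps the Surface
// returned by MediaCodec.createInputSurface(), so GL rendering feeds the encoder.
// "format" and "ptsNs" are assumed names; the format is configured as in VideoEncoder below.
//
//   MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
//   encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
//   InputSurface inputSurface = new InputSurface(encoder.createInputSurface());
//   encoder.start();
//   inputSurface.makeCurrent();               // subsequent GL draws target the encoder
//   // ... draw a frame with GL ...
//   inputSurface.setPresentationTime(ptsNs);  // nanoseconds
//   inputSurface.swapBuffers();               // submits the frame to the encoder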
package jp.agentec.abook.abv.ui.home.helper;
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLES11Ext;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import java.nio.ByteBuffer;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import javax.microedition.khronos.opengles.GL;
import javax.microedition.khronos.opengles.GL10;
/**
* Holds state associated with a Surface used for MediaCodec decoder output.
* <p>
* The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
* and then create a Surface for that SurfaceTexture. The Surface can be passed to
* MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
* texture with updateTexImage, then render the texture with GL to a pbuffer.
* <p>
* The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
* Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
* we just draw it on whatever surface is current.
* <p>
* By default, the Surface will be using a BufferQueue in asynchronous mode, so we
* can potentially drop frames.
*/
class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
private static final String TAG = "OutputSurface";
private static final boolean VERBOSE = true;
private static final int EGL_OPENGL_ES2_BIT = 4;
private EGL10 mEGL;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private SurfaceTexture mSurfaceTexture;
private Surface mSurface;
private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
private boolean mFrameAvailable;
private TextureRender mTextureRender;
private HandlerThread mHandlerThread;
private Handler mHandler;
/**
* Creates an OutputSurface backed by a pbuffer with the specified dimensions. The new
* EGL context and surface will be made current. Creates a Surface that can be passed
* to MediaCodec.configure().
*/
public OutputSurface(int width, int height) {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException();
}
eglSetup(width, height);
makeCurrent();
setup();
}
/**
* Creates an OutputSurface using the current EGL context. Creates a Surface that can be
* passed to MediaCodec.configure().
*/
public OutputSurface() {
setup();
}
/**
* Creates instances of TextureRender and SurfaceTexture, and a Surface associated
* with the SurfaceTexture.
*/
private void setup() {
mTextureRender = new TextureRender();
mTextureRender.surfaceCreated();
// Even if we don't access the SurfaceTexture after the constructor returns, we
// still need to keep a reference to it. The Surface doesn't retain a reference
// at the Java level, so if we don't either then the object can get GCed, which
// causes the native finalizer to run.
// mHandlerThread = new HandlerThread("callback-thread");
// mHandlerThread.start();
// mHandler = new Handler(mHandlerThread.getLooper());
if (VERBOSE) {
Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// This doesn't work if OutputSurface is created on the thread that CTS started for
// these test cases.
//
// The CTS-created thread has a Looper, and the SurfaceTexture constructor will
// create a Handler that uses it. The "frame available" message is delivered
// there, but since we're not a Looper-based thread we'll never see it. For
// this to do anything useful, OutputSurface must be created on a thread without
// a Looper, so that SurfaceTexture uses the main application Looper instead.
//
// Java language note: passing "this" out of a constructor is generally unwise,
// but we should be able to get away with it here.
mSurfaceTexture.setOnFrameAvailableListener(this, mHandler);
mSurface = new Surface(mSurfaceTexture);
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
*/
private void eglSetup(int width, int height) {
mEGL = (EGL10)EGLContext.getEGL();
mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (!mEGL.eglInitialize(mEGLDisplay, null)) {
throw new RuntimeException("unable to initialize EGL10");
}
// Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
// to be able to tell if the frame is reasonable.
int[] attribList = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
throw new RuntimeException("unable to find RGB888+pbuffer EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL10.EGL_NONE
};
mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
attrib_list);
checkEglError("eglCreateContext");
if (mEGLContext == null) {
throw new RuntimeException("null context");
}
// Create a pbuffer surface. By using this for output, we can use glReadPixels
// to test values in the output.
int[] surfaceAttribs = {
EGL10.EGL_WIDTH, width,
EGL10.EGL_HEIGHT, height,
EGL10.EGL_NONE
};
mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
checkEglError("eglCreatePbufferSurface");
if (mEGLSurface == null) {
throw new RuntimeException("surface was null");
}
}
/**
* Discard all resources held by this class, notably the EGL context.
*/
public void release() {
if (mEGL != null) {
if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
// Clear the current context and surface to ensure they are discarded immediately.
mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_CONTEXT);
}
mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
//mEGL.eglTerminate(mEGLDisplay);
}
mSurface.release();
// this causes a bunch of warnings that appear harmless but might confuse someone:
// W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
//mSurfaceTexture.release();
// null everything out so future attempts to use this object will cause an NPE
mEGLDisplay = null;
mEGLContext = null;
mEGLSurface = null;
mEGL = null;
mTextureRender = null;
mSurface = null;
mSurfaceTexture = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
if (mEGL == null) {
throw new RuntimeException("not configured for makeCurrent");
}
checkEglError("before makeCurrent");
if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Returns the Surface that we draw onto.
*/
public Surface getSurface() {
return mSurface;
}
/**
* Replaces the fragment shader.
*/
public void changeFragmentShader(String fragmentShader) {
mTextureRender.changeFragmentShader(fragmentShader);
}
/**
* Latches the next buffer into the texture. Must be called from the thread that created
* the OutputSurface object, after the onFrameAvailable callback has signaled that new
* data is available.
*/
public void awaitNewImage() {
final int TIMEOUT_MS = 2500;
synchronized (mFrameSyncObject) {
while (!mFrameAvailable) {
try {
// Wait for onFrameAvailable() to signal us. Use a timeout to avoid
// stalling the test if it doesn't arrive.
mFrameSyncObject.wait(TIMEOUT_MS);
if (!mFrameAvailable) {
// TODO: if "spurious wakeup", continue while loop
throw new RuntimeException("Surface frame wait timed out");
}
} catch (InterruptedException ie) {
// shouldn't happen
throw new RuntimeException(ie);
}
}
mFrameAvailable = false;
}
// Latch the data.
mTextureRender.checkGlError("before updateTexImage");
mSurfaceTexture.updateTexImage();
}
/**
* Draws the data from SurfaceTexture onto the current EGL surface.
*/
public void drawImage() {
mTextureRender.drawFrame(mSurfaceTexture);
}
@Override
public void onFrameAvailable(SurfaceTexture st) {
if (VERBOSE) {
Log.d(TAG, "new frame available");
}
synchronized (mFrameSyncObject) {
if (mFrameAvailable) {
throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
}
mFrameAvailable = true;
mFrameSyncObject.notifyAll();
}
}
/**
* Checks for EGL errors.
*/
private void checkEglError(String msg) {
boolean failed = false;
int error;
while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
failed = true;
}
if (failed) {
throw new RuntimeException("EGL error encountered (see log)");
}
}
}
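// Usage sketch (illustrative): the decode side of the pipeline. The decoder renders into
// outputSurface.getSurface(); each frame is then latched and redrawn onto whatever EGL
// surface is current (in this change, the encoder's InputSurface). "decoder", "inputFormat",
// and "index" are assumed names:
//
//   OutputSurface outputSurface = new OutputSurface();   // uses the current EGL context
//   decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
//   ...
//   decoder.releaseOutputBuffer(index, true);            // render == true
//   outputSurface.awaitNewImage();                       // waits for onFrameAvailable
//   outputSurface.drawImage();                           // draws onto the current surface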
package jp.agentec.abook.abv.ui.home.helper;
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
/**
* Code for rendering a texture onto a surface using OpenGL ES 2.0.
*/
class TextureRender {
private static final String TAG = "TextureRender";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private static final String VERTEX_SHADER =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID = -12345;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
public TextureRender() {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public int getTextureId() {
return mTextureID;
}
public void drawFrame(SurfaceTexture st) {
checkGlError("onDrawFrame start");
st.getTransformMatrix(mSTMatrix);
GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
/**
* Initializes GL state. Call this after the EGL surface has been created and made current.
*/
public void surfaceCreated() {
mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
}
/**
* Replaces the fragment shader.
*/
public void changeFragmentShader(String fragmentShader) {
GLES20.glDeleteProgram(mProgram);
mProgram = createProgram(VERTEX_SHADER, fragmentShader);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
if (program == 0) {
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
public void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
}
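// Usage sketch (illustrative): changeFragmentShader() permits simple per-frame edits during
// transcoding, e.g. a channel-swizzling shader in the style of the AOSP tests this class
// derives from (the ".gbra" swizzle is an arbitrary illustration, not used by this change):
//
//   String swapped =
//       "#extension GL_OES_EGL_image_external : require\n" +
//       "precision mediump float;\n" +
//       "varying vec2 vTextureCoord;\n" +
//       "uniform samplerExternalOES sTexture;\n" +
//       "void main() {\n" +
//       "  gl_FragColor = texture2D(sTexture, vTextureCoord).gbra;\n" +
//       "}\n";
//   outputSurface.changeFragmentShader(swapped);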
package jp.agentec.abook.abv.ui.home.helper;
import android.annotation.SuppressLint;
import android.graphics.Bitmap;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMetadataRetriever;
import android.media.MediaMuxer;
import android.view.Surface;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicReference;
public class VideoEncoder {
private static final int TIMEOUT_USEC = 10000;
private static final String OUTPUT_VIDEO_MIME_TYPE = "video/avc";
private static final int OUTPUT_VIDEO_BIT_RATE = 2500 * 1024;
private static final int OUTPUT_VIDEO_FRAME_RATE = 30;
private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 10;
private static final int OUTPUT_VIDEO_COLOR_FORMAT =
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
private static final String OUTPUT_AUDIO_MIME_TYPE = "audio/mp4a-latm";
private static final int OUTPUT_AUDIO_CHANNEL_COUNT = 2;
private static final int OUTPUT_AUDIO_BIT_RATE = 128 * 1024;
private static final int OUTPUT_AUDIO_AAC_PROFILE =
MediaCodecInfo.CodecProfileLevel.AACObjectHE;
private static final int OUTPUT_AUDIO_SAMPLE_RATE_HZ = 44100;
private int mWidth = 854;
private int mHeight = 480;
private String mOutputFile;
private String mInputFile;
@SuppressLint("LongLogTag")
public String changeResolution(File f, Runnable callBack) throws Throwable {
mInputFile = f.getAbsolutePath();
String filePath = mInputFile.substring(0, mInputFile.lastIndexOf(File.separator));
String[] splitByDot = mInputFile.split("\\.");
String ext = "";
if (splitByDot.length > 1) {
ext = splitByDot[splitByDot.length-1];
}
String fileName = mInputFile.substring(mInputFile.lastIndexOf(File.separator)+1);
if (ext.length() > 0) {
fileName = fileName.replace("."+ext, System.currentTimeMillis() + "_out.mp4");
} else {
fileName = fileName.concat(System.currentTimeMillis() + "_out.mp4");
}
final File outFile = new File(filePath, fileName);
if (!outFile.exists()) {
outFile.createNewFile();
}
mOutputFile = outFile.getAbsolutePath();
ChangerWrapper.changeResolutionInSeparatedThread(this, callBack);
return mOutputFile;
}
private static class ChangerWrapper implements Runnable {
private Throwable mThrowable;
private VideoEncoder mChanger;
private Runnable mCallBack;
private ChangerWrapper(VideoEncoder changer, Runnable callBack) {
mChanger = changer;
mCallBack = callBack;
}
@Override
public void run() {
try {
mChanger.prepareAndChangeResolution(mCallBack);
} catch (Throwable th) {
mThrowable = th;
}
}
public static void changeResolutionInSeparatedThread(VideoEncoder encoder, Runnable callBack) throws Throwable {
ChangerWrapper wrapper = new ChangerWrapper(encoder, callBack);
Thread th = new Thread(wrapper, ChangerWrapper.class.getSimpleName());
th.start();
// th.join() is left commented out so encoding runs asynchronously; completion is
// signaled through the callback, and mThrowable below can only catch failures that
// occur before this method returns.
// th.join();
if (wrapper.mThrowable != null) {
throw wrapper.mThrowable;
}
}
}
private void prepareAndChangeResolution(Runnable callBack) throws Exception {
Exception exception = null;
MediaCodecInfo videoCodecInfo = selectCodec(OUTPUT_VIDEO_MIME_TYPE);
if (videoCodecInfo == null) {
return;
}
MediaCodecInfo audioCodecInfo = selectCodec(OUTPUT_AUDIO_MIME_TYPE);
if (audioCodecInfo == null) {
return;
}
MediaExtractor videoExtractor = null;
MediaExtractor audioExtractor = null;
OutputSurface outputSurface = null;
MediaCodec videoDecoder = null;
MediaCodec audioDecoder = null;
MediaCodec videoEncoder = null;
MediaCodec audioEncoder = null;
MediaMuxer muxer = null;
InputSurface inputSurface = null;
try {
videoExtractor = createExtractor();
int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
MediaFormat inputFormat = videoExtractor.getTrackFormat(videoInputTrack);
MediaMetadataRetriever m = new MediaMetadataRetriever();
m.setDataSource(mInputFile);
int inputWidth, inputHeight;
try {
inputWidth = Integer.parseInt(m.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
inputHeight = Integer.parseInt(m.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
} catch (Exception e) {
Bitmap thumbnail = m.getFrameAtTime();
inputWidth = thumbnail.getWidth();
inputHeight = thumbnail.getHeight();
thumbnail.recycle();
}
if (inputWidth > inputHeight){
if (mWidth < mHeight) {
int w = mWidth;
mWidth = mHeight;
mHeight = w;
}
} else {
if (mWidth > mHeight) {
int w = mWidth;
mWidth = mHeight;
mHeight = w;
}
}
MediaFormat outputVideoFormat =
MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, mWidth, mHeight);
outputVideoFormat.setInteger(
MediaFormat.KEY_COLOR_FORMAT, OUTPUT_VIDEO_COLOR_FORMAT);
outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE);
outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
outputVideoFormat.setInteger(
MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
AtomicReference<Surface> inputSurfaceReference = new AtomicReference<Surface>();
videoEncoder = createVideoEncoder(
videoCodecInfo, outputVideoFormat, inputSurfaceReference);
inputSurface = new InputSurface(inputSurfaceReference.get());
inputSurface.makeCurrent();
outputSurface = new OutputSurface();
videoDecoder = createVideoDecoder(inputFormat, outputSurface.getSurface());
audioExtractor = createExtractor();
int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor);
MediaFormat inputAudioFormat = audioExtractor.getTrackFormat(audioInputTrack);
MediaFormat outputAudioFormat =
MediaFormat.createAudioFormat(inputAudioFormat.getString(MediaFormat.KEY_MIME),
inputAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
inputAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_AUDIO_BIT_RATE);
outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
audioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat);
audioDecoder = createAudioDecoder(inputAudioFormat);
muxer = new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
changeResolution(videoExtractor, audioExtractor,
videoDecoder, videoEncoder,
audioDecoder, audioEncoder,
muxer, inputSurface, outputSurface);
} finally {
try {
if (videoExtractor != null) {
videoExtractor.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (audioExtractor != null) {
audioExtractor.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (videoDecoder != null) {
videoDecoder.stop();
videoDecoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (outputSurface != null) {
outputSurface.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (videoEncoder != null) {
videoEncoder.stop();
videoEncoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (audioDecoder != null) {
audioDecoder.stop();
audioDecoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (muxer != null) {
muxer.stop();
muxer.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (inputSurface != null) {
inputSurface.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
callBack.run();
}
if (exception != null) {
throw exception;
}
}
private MediaExtractor createExtractor() throws IOException {
MediaExtractor extractor;
extractor = new MediaExtractor();
extractor.setDataSource(mInputFile);
return extractor;
}
private MediaCodec createVideoDecoder(MediaFormat inputFormat, Surface surface) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, surface, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createVideoEncoder(MediaCodecInfo codecInfo, MediaFormat format,
AtomicReference<Surface> surfaceReference) throws IOException {
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
surfaceReference.set(encoder.createInputSurface());
encoder.start();
return encoder;
}
private MediaCodec createAudioDecoder(MediaFormat inputFormat) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, null, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createAudioEncoder(MediaCodecInfo codecInfo, MediaFormat format) throws IOException {
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
return encoder;
}
private int getAndSelectVideoTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (isVideoFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (isAudioFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private void changeResolution(MediaExtractor videoExtractor, MediaExtractor audioExtractor,
MediaCodec videoDecoder, MediaCodec videoEncoder,
MediaCodec audioDecoder, MediaCodec audioEncoder,
MediaMuxer muxer,
InputSurface inputSurface, OutputSurface outputSurface) {
ByteBuffer[] videoDecoderInputBuffers;
ByteBuffer[] videoDecoderOutputBuffers;
ByteBuffer[] videoEncoderOutputBuffers;
MediaCodec.BufferInfo videoDecoderOutputBufferInfo;
MediaCodec.BufferInfo videoEncoderOutputBufferInfo;
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
videoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
videoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] audioDecoderInputBuffers;
ByteBuffer[] audioDecoderOutputBuffers;
ByteBuffer[] audioEncoderInputBuffers;
ByteBuffer[] audioEncoderOutputBuffers;
MediaCodec.BufferInfo audioDecoderOutputBufferInfo;
MediaCodec.BufferInfo audioEncoderOutputBufferInfo;
audioDecoderInputBuffers = audioDecoder.getInputBuffers();
audioDecoderOutputBuffers = audioDecoder.getOutputBuffers();
audioEncoderInputBuffers = audioEncoder.getInputBuffers();
audioEncoderOutputBuffers = audioEncoder.getOutputBuffers();
audioDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
audioEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
MediaFormat decoderOutputVideoFormat = null;
MediaFormat decoderOutputAudioFormat = null;
MediaFormat encoderOutputVideoFormat = null;
MediaFormat encoderOutputAudioFormat = null;
int outputVideoTrack = -1;
int outputAudioTrack = -1;
boolean videoExtractorDone = false;
boolean videoDecoderDone = false;
boolean videoEncoderDone = false;
boolean audioExtractorDone = false;
boolean audioDecoderDone = false;
boolean audioEncoderDone = false;
int pendingAudioDecoderOutputBufferIndex = -1;
boolean muxing = false;
while ((!videoEncoderDone) || (!audioEncoderDone)) {
while (!videoExtractorDone
&& (encoderOutputVideoFormat == null || muxing)) {
int decoderInputBufferIndex = videoDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
ByteBuffer decoderInputBuffer = videoDecoderInputBuffers[decoderInputBufferIndex];
int size = videoExtractor.readSampleData(decoderInputBuffer, 0);
long presentationTime = videoExtractor.getSampleTime();
if (size >= 0) {
videoDecoder.queueInputBuffer(
decoderInputBufferIndex,
0,
size,
presentationTime,
videoExtractor.getSampleFlags());
}
videoExtractorDone = !videoExtractor.advance();
if (videoExtractorDone) {
videoDecoder.queueInputBuffer(decoderInputBufferIndex,
0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
break;
}
while (!audioExtractorDone
&& (encoderOutputAudioFormat == null || muxing)) {
int decoderInputBufferIndex = audioDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
ByteBuffer decoderInputBuffer = audioDecoderInputBuffers[decoderInputBufferIndex];
int size = audioExtractor.readSampleData(decoderInputBuffer, 0);
long presentationTime = audioExtractor.getSampleTime();
if (size >= 0) {
audioDecoder.queueInputBuffer(decoderInputBufferIndex, 0, size,
presentationTime, audioExtractor.getSampleFlags());
}
audioExtractorDone = !audioExtractor.advance();
if (audioExtractorDone) {
audioDecoder.queueInputBuffer(decoderInputBufferIndex, 0, 0,
0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
break;
}
while (!videoDecoderDone
&& (encoderOutputVideoFormat == null || muxing)) {
int decoderOutputBufferIndex =
videoDecoder.dequeueOutputBuffer(
videoDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputVideoFormat = videoDecoder.getOutputFormat();
break;
}
ByteBuffer decoderOutputBuffer =
videoDecoderOutputBuffers[decoderOutputBufferIndex];
if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
boolean render = videoDecoderOutputBufferInfo.size != 0;
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render);
if (render) {
outputSurface.awaitNewImage();
outputSurface.drawImage();
inputSurface.setPresentationTime(
videoDecoderOutputBufferInfo.presentationTimeUs * 1000);
inputSurface.swapBuffers();
}
if ((videoDecoderOutputBufferInfo.flags
& MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
videoDecoderDone = true;
videoEncoder.signalEndOfInputStream();
}
break;
}
while (!audioDecoderDone && pendingAudioDecoderOutputBufferIndex == -1
&& (encoderOutputAudioFormat == null || muxing)) {
int decoderOutputBufferIndex =
audioDecoder.dequeueOutputBuffer(
audioDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
audioDecoderOutputBuffers = audioDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputAudioFormat = audioDecoder.getOutputFormat();
break;
}
ByteBuffer decoderOutputBuffer =
audioDecoderOutputBuffers[decoderOutputBufferIndex];
if ((audioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
audioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
pendingAudioDecoderOutputBufferIndex = decoderOutputBufferIndex;
break;
}
while (pendingAudioDecoderOutputBufferIndex != -1) {
int encoderInputBufferIndex = audioEncoder.dequeueInputBuffer(TIMEOUT_USEC);
if (encoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// No input buffer available yet; indexing the buffer array with -1 would crash.
break;
}
ByteBuffer encoderInputBuffer = audioEncoderInputBuffers[encoderInputBufferIndex];
int size = audioDecoderOutputBufferInfo.size;
long presentationTime = audioDecoderOutputBufferInfo.presentationTimeUs;
if (size >= 0) {
ByteBuffer decoderOutputBuffer =
audioDecoderOutputBuffers[pendingAudioDecoderOutputBufferIndex]
.duplicate();
decoderOutputBuffer.position(audioDecoderOutputBufferInfo.offset);
decoderOutputBuffer.limit(audioDecoderOutputBufferInfo.offset + size);
encoderInputBuffer.position(0);
encoderInputBuffer.put(decoderOutputBuffer);
audioEncoder.queueInputBuffer(
encoderInputBufferIndex,
0,
size,
presentationTime,
audioDecoderOutputBufferInfo.flags);
}
audioDecoder.releaseOutputBuffer(pendingAudioDecoderOutputBufferIndex, false);
pendingAudioDecoderOutputBufferIndex = -1;
if ((audioDecoderOutputBufferInfo.flags
& MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
audioDecoderDone = true;
}
break;
}
while (!videoEncoderDone
&& (encoderOutputVideoFormat == null || muxing)) {
int encoderOutputBufferIndex = videoEncoder.dequeueOutputBuffer(
videoEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
encoderOutputVideoFormat = videoEncoder.getOutputFormat();
break;
}
ByteBuffer encoderOutputBuffer =
videoEncoderOutputBuffers[encoderOutputBufferIndex];
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (videoEncoderOutputBufferInfo.size != 0) {
muxer.writeSampleData(
outputVideoTrack, encoderOutputBuffer, videoEncoderOutputBufferInfo);
}
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
videoEncoderDone = true;
}
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
while (!audioEncoderDone
&& (encoderOutputAudioFormat == null || muxing)) {
int encoderOutputBufferIndex = audioEncoder.dequeueOutputBuffer(
audioEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
audioEncoderOutputBuffers = audioEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
encoderOutputAudioFormat = audioEncoder.getOutputFormat();
break;
}
ByteBuffer encoderOutputBuffer =
audioEncoderOutputBuffers[encoderOutputBufferIndex];
if ((audioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
audioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (audioEncoderOutputBufferInfo.size != 0) {
muxer.writeSampleData(
outputAudioTrack, encoderOutputBuffer, audioEncoderOutputBufferInfo);
}
if ((audioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
audioEncoderDone = true;
}
audioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (!muxing && (encoderOutputAudioFormat != null)
&& (encoderOutputVideoFormat != null)) {
outputVideoTrack = muxer.addTrack(encoderOutputVideoFormat);
outputAudioTrack = muxer.addTrack(encoderOutputAudioFormat);
muxer.start();
muxing = true;
}
}
}
private static boolean isVideoFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("video/");
}
private static boolean isAudioFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("audio/");
}
private static String getMimeTypeFor(MediaFormat format) {
return format.getString(MediaFormat.KEY_MIME);
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
}
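// Usage sketch (illustrative; mirrors the call in ChatWebViewActivity above):
// changeResolution() returns the output path immediately and encodes a 480p H.264/AAC MP4
// on a worker thread, so the callback is the signal that the file at the returned path is
// complete. "sourcePath" is a placeholder; changeResolution() is declared to throw Throwable.
//
//   final VideoEncoder encoder = new VideoEncoder();
//   String outPath = encoder.changeResolution(new File(sourcePath), new Runnable() {
//       @Override
//       public void run() {
//           // encoding finished; the file at the returned path is now playable
//       }
//   });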