Commit d8023f43 in abook_android / abook_check
Authored May 26, 2021 by Lee Munkyeong
動画エンコード実装 (Video encoding implementation)
Parent: 0a8ad4c6

Showing 6 changed files with 460 additions and 2 deletions (+460 / -2):
ABVJE_UI_Android/proguard.cfg (+1 / -1)
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/activity/ChatWebViewActivity.java (+82 / -1)
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/InputSerface.java (+163 / -0)
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/OutputSerface.java (+0 / -0)
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/TextureRender.java (+214 / -0)
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/VideoEncoder.java (+0 / -0)
ABVJE_UI_Android/proguard.cfg (view file @ d8023f43)
...
@@ -3,7 +3,7 @@
 -dontskipnonpubliclibraryclasses
 -dontpreverify
 -verbose
--optimizations !code/simplification/arithmetic,!field/*,!class/merging/*
+-optimizations !code/simplification/arithmetic,!field/*,!class/merging/*,!code/removal/advanced,!method/inlining/short,!method/inlining/unique,!method/removal/*,!method/marking/*
 -keep public class * extends android.app.Activity
 -keep public class * extends android.app.Application
...
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/activity/ChatWebViewActivity.java (view file @ d8023f43)
...
@@ -14,6 +14,7 @@ import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.ConnectivityManager;
...
@@ -26,6 +27,8 @@ import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.BaseColumns;
import android.provider.MediaStore;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.KeyEvent;
...
@@ -101,6 +104,7 @@ import jp.agentec.abook.abv.ui.common.dialog.ABookAlertDialog;
import jp.agentec.abook.abv.ui.common.util.ABVToastUtil;
import jp.agentec.abook.abv.ui.common.util.AlertDialogUtil;
import jp.agentec.abook.abv.ui.home.helper.ActivityHandlingHelper;
import jp.agentec.abook.abv.ui.home.helper.VideoEncoder;
import jp.agentec.abook.abv.ui.home.view.FullscreenableChromeClient;
import jp.agentec.abook.abv.ui.viewer.activity.CommunicationWebViewActivity;
import jp.agentec.abook.abv.ui.viewer.activity.ParentWebViewActivity;
...
@@ -144,6 +148,8 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
    private String mSkey;
    private Integer joinMeetingId;
    private boolean isPIP;
    private String encodedFilePath;
    private Uri encodedVideoPath;
    // 0: create collaboration, 1: join collaboration (0:協業生成, 1:協業参加)
    private Integer collaborationJoinFlg = 0;
...
@@ -785,11 +791,86 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
            if (mUploadMessage == null) {
                return;
            }
            mUploadMessage.onReceiveValue(result);
            /*mChatWebView.post(new Runnable() {
                @Override
                public void run() {
                    mChatWebView.loadUrl("javascript:CHAT_UI.showLoadingIndicator();");
                }
            });*/
            if (dataUri != null) {
                Cursor cursor = getContentResolver().query(dataUri, null, null, null, null);
                cursor.moveToNext();
                final String filePath = cursor.getString(cursor.getColumnIndex("_data"));
                cursor.close();
                Runnable r = new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Context c = getApplicationContext();
                            File file = new File(filePath);
                            String encodedPath = new VideoEncoder().changeResolution(file);
                            encodedFilePath = encodedPath;
                            final Uri _uri = getImageContentUri(c, new File(encodedPath));
                            sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, _uri));
                            if (_uri != null) {
                                // result = new Uri[]{_uri};
                                // TODO: send result;
                                encodedVideoPath = _uri;
                                //mUploadMessage.onReceiveValue(new Uri[]{_uri});
                                mChatWebView.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        mChatWebView.loadUrl("javascript:CHAT_UI.videoEncodeEnd('" + _uri + "');");
                                    }
                                });
                            }
                        } catch (Throwable throwable) {
                            throwable.printStackTrace();
                        }
                    }
                };
                r.run();
                mUploadMessage.onReceiveValue(null);
            }
        }
        mUploadMessage = null;
    }
    public static Uri getImageContentUri(Context context, File file) {
        if (!file.exists()) {
            return null;
        }
        String filePath = file.getAbsolutePath();
        Cursor cursor = context.getContentResolver().query(
                MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                new String[] { BaseColumns._ID },
                MediaStore.MediaColumns.DATA + "=? ",
                new String[] { filePath },
                null);
        if (cursor != null && cursor.moveToFirst()) {
            int id = cursor.getInt(cursor.getColumnIndex(BaseColumns._ID));
            Uri baseUri = Uri.parse("content://media/external/video/media");
            return Uri.withAppendedPath(baseUri, "" + id);
        } else {
            if (file.exists()) {
                ContentValues values = new ContentValues();
                values.put(MediaStore.MediaColumns.DATA, filePath);
                return context.getContentResolver().insert(
                        MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
            } else {
                return null;
            }
        }
    }
    /**
     * Checks whether an unread push message exists.
     * @param pushMessageDtoList the list to check
...
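In the hunk above, the file-chooser result handler resolves the picked video's file path, re-encodes it with the new VideoEncoder helper, asks the media scanner to index the result, and then notifies the page through CHAT_UI.videoEncodeEnd. Note that the Runnable is executed with r.run(), which runs the encode synchronously on the calling thread. Below is a minimal sketch of the same flow pushed onto a worker thread; the fields and calls are taken from the diff, while the Thread wrapper itself is only an illustrative assumption, not part of the commit.

                // Illustrative only: same encode-and-notify flow as the diff, but on a worker thread
                // so the file-chooser callback is not blocked. filePath is the final local from the diff.
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            File file = new File(filePath);                                 // path resolved from the picked Uri
                            String encodedPath = new VideoEncoder().changeResolution(file); // helper added by this commit
                            encodedFilePath = encodedPath;
                            final Uri uri = getImageContentUri(getApplicationContext(), new File(encodedPath));
                            sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, uri));
                            if (uri != null) {
                                encodedVideoPath = uri;
                                mChatWebView.post(new Runnable() {   // hop back to the UI thread for the WebView call
                                    @Override
                                    public void run() {
                                        mChatWebView.loadUrl("javascript:CHAT_UI.videoEncodeEnd('" + uri + "');");
                                    }
                                });
                            }
                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                    }
                }).start();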
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/InputSerface.java (new file, 0 → 100644, view file @ d8023f43)
package jp.agentec.abook.abv.ui.home.helper;

/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import android.opengl.EGL14;
import android.opengl.EGLExt;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;

/**
 * Holds state associated with a Surface used for MediaCodec encoder input.
 * <p>
 * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
 * to create an EGL window surface.  Calls to eglSwapBuffers() cause a frame of data to be sent
 * to the video encoder.
 */
class InputSurface {
    private static final String TAG = "InputSurface";
    private static final boolean VERBOSE = true;

    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
    private static final int EGL_OPENGL_ES2_BIT = 4;

    private EGLDisplay mEGLDisplay;
    private EGLContext mEGLContext;
    private EGLSurface mEGLSurface;
    private Surface mSurface;

    /**
     * Creates an InputSurface from a Surface.
     */
    public InputSurface(Surface surface) {
        if (surface == null) {
            throw new NullPointerException();
        }
        mSurface = surface;
        eglSetup();
    }

    /**
     * Prepares EGL.  We want a GLES 2.0 context and a surface that supports recording.
     */
    private void eglSetup() {
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
            mEGLDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }
        // Configure EGL for pbuffer and OpenGL ES 2.0.  We want enough RGB bits
        // to be able to tell if the frame is reasonable.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
        }
        // Configure context for OpenGL ES 2.0.
        int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                attrib_list, 0);
        checkEglError("eglCreateContext");
        if (mEGLContext == null) {
            throw new RuntimeException("null context");
        }
        // Create a window surface, and attach it to the Surface we received.
        int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
                surfaceAttribs, 0);
        checkEglError("eglCreateWindowSurface");
        if (mEGLSurface == null) {
            throw new RuntimeException("surface was null");
        }
    }

    /**
     * Discard all resources held by this class, notably the EGL context.  Also releases the
     * Surface that was passed to our constructor.
     */
    public void release() {
        if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
            // Clear the current context and surface to ensure they are discarded immediately.
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                    EGL14.EGL_NO_CONTEXT);
        }
        EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
        EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
        //EGL14.eglTerminate(mEGLDisplay);
        mSurface.release();
        // null everything out so future attempts to use this object will cause an NPE
        mEGLDisplay = null;
        mEGLContext = null;
        mEGLSurface = null;
        mSurface = null;
    }

    /**
     * Makes our EGL context and surface current.
     */
    public void makeCurrent() {
        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /**
     * Calls eglSwapBuffers.  Use this to "publish" the current frame.
     */
    public boolean swapBuffers() {
        return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    }

    /**
     * Returns the Surface that the MediaCodec receives buffers from.
     */
    public Surface getSurface() {
        return mSurface;
    }

    /**
     * Sends the presentation time stamp to EGL.  Time is expressed in nanoseconds.
     */
    public void setPresentationTime(long nsecs) {
        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
    }

    /**
     * Checks for EGL errors.
     */
    private void checkEglError(String msg) {
        boolean failed = false;
        int error;
        while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
            failed = true;
        }
        if (failed) {
            throw new RuntimeException("EGL error encountered (see log)");
        }
    }
}
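For reference, a minimal sketch of how an InputSurface like this is typically driven (assumed usage, not code from this commit): wrap the Surface returned by MediaCodec.createInputSurface(), make the EGL context current before rendering, and publish each rendered frame with setPresentationTime() plus swapBuffers(). Resolution, bit rate, frame count, and frame rate below are placeholders, and the android.media imports are omitted.

    // Sketch (assumed usage): drive an H.264 surface-input encoder through InputSurface.
    // Output draining and muxing are omitted; placeholder values throughout.
    private static void encodeWithInputSurface(int frameCount) throws java.io.IOException {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // frames come from a Surface, not ByteBuffers
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);

        MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        InputSurface inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();              // GL rendering now targets the encoder's input surface
        encoder.start();

        for (int i = 0; i < frameCount; i++) {
            // ... render the frame with GL here (e.g. TextureRender.drawFrame(...)) ...
            inputSurface.setPresentationTime(i * 1000000000L / 30); // PTS in nanoseconds
            inputSurface.swapBuffers();          // publish the frame to the encoder
            // ... drain encoder output and feed it to a MediaMuxer here ...
        }

        encoder.signalEndOfInputStream();
        inputSurface.release();
        encoder.stop();
        encoder.release();
    }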
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/OutputSerface.java (new file, 0 → 100644, view file @ d8023f43)
(This diff is collapsed in the page and its contents are not shown.)
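The collapsed file is presumably the decoder-side counterpart of InputSurface: in the AOSP decode-edit-encode sample code that InputSurface above and TextureRender below carry license headers from, an output-surface helper wraps a SurfaceTexture backed by a TextureRender so decoded frames can be latched and redrawn with GL. The sketch below only illustrates that role; the class name, method names, and structure are assumptions and do not claim to reproduce the collapsed OutputSerface.java.

    // Hypothetical outline of the decoder-output helper's role (assumption based on the AOSP sample).
    class OutputSurfaceSketch implements SurfaceTexture.OnFrameAvailableListener {
        private TextureRender mTextureRender = new TextureRender();
        private SurfaceTexture mSurfaceTexture;
        private Surface mSurface;
        private boolean mFrameAvailable;

        OutputSurfaceSketch() {
            mTextureRender.surfaceCreated();                                    // needs a current EGL context
            mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
            mSurfaceTexture.setOnFrameAvailableListener(this);
            mSurface = new Surface(mSurfaceTexture);                            // handed to decoder.configure(...)
        }

        Surface getSurface() { return mSurface; }

        synchronized void awaitNewImage() throws InterruptedException {
            while (!mFrameAvailable) { wait(5000); }                            // wait for the decoder to produce a frame
            mFrameAvailable = false;
            mSurfaceTexture.updateTexImage();                                   // latch the frame into the GL texture
        }

        void drawImage() { mTextureRender.drawFrame(mSurfaceTexture); }         // redraw onto the current EGL surface

        @Override
        public synchronized void onFrameAvailable(SurfaceTexture st) {
            mFrameAvailable = true;
            notifyAll();
        }
    }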
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/TextureRender.java (new file, 0 → 100644, view file @ d8023f43)
package jp.agentec.abook.abv.ui.home.helper;

/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;

/**
 * Code for rendering a texture onto a surface using OpenGL ES 2.0.
 */
class TextureRender {
    private static final String TAG = "TextureRender";

    private static final int FLOAT_SIZE_BYTES = 4;
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
    private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
             1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f,  1.0f, 0, 0.f, 1.f,
             1.0f,  1.0f, 0, 1.f, 1.f,
    };

    private FloatBuffer mTriangleVertices;

    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
            "uniform mat4 uSTMatrix;\n" +
            "attribute vec4 aPosition;\n" +
            "attribute vec4 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = uMVPMatrix * aPosition;\n" +
            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
            "}\n";

    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +      // highp here doesn't seem to matter
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}\n";

    private float[] mMVPMatrix = new float[16];
    private float[] mSTMatrix = new float[16];

    private int mProgram;
    private int mTextureID = -12345;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;

    public TextureRender() {
        mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(mTriangleVerticesData).position(0);

        Matrix.setIdentityM(mSTMatrix, 0);
    }

    public int getTextureId() {
        return mTextureID;
    }

    public void drawFrame(SurfaceTexture st) {
        checkGlError("onDrawFrame start");
        st.getTransformMatrix(mSTMatrix);

        GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");

        Matrix.setIdentityM(mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        checkGlError("glDrawArrays");
        GLES20.glFinish();
    }

    /**
     * Initializes GL state.  Call this after the EGL surface has been created and made current.
     */
    public void surfaceCreated() {
        mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (mProgram == 0) {
            throw new RuntimeException("failed creating program");
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }

        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        checkGlError("glGetUniformLocation uMVPMatrix");
        if (muMVPMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }

        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
        checkGlError("glGetUniformLocation uSTMatrix");
        if (muSTMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }

        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);

        mTextureID = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        checkGlError("glBindTexture mTextureID");

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("glTexParameter");
    }

    /**
     * Replaces the fragment shader.
     */
    public void changeFragmentShader(String fragmentShader) {
        GLES20.glDeleteProgram(mProgram);
        mProgram = createProgram(VERTEX_SHADER, fragmentShader);
        if (mProgram == 0) {
            throw new RuntimeException("failed creating program");
        }
    }

    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            Log.e(TAG, "Could not create program");
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }

    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }
}
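A short usage sketch for TextureRender (assumed usage, not code from this commit): create the GL program and external OES texture with surfaceCreated() while an EGL context is current, attach a SurfaceTexture to the returned texture id, and for each new frame call updateTexImage() followed by drawFrame(). It assumes android.graphics.SurfaceTexture and android.view.Surface are imported.

    // Assumed usage sketch; requires a current EGL context (e.g. after InputSurface.makeCurrent()).
    TextureRender renderer = new TextureRender();
    renderer.surfaceCreated();                                   // compiles shaders, creates the external OES texture

    SurfaceTexture surfaceTexture = new SurfaceTexture(renderer.getTextureId());
    Surface decoderSurface = new Surface(surfaceTexture);        // hand this Surface to a decoder or camera

    // whenever a new frame has been produced into the SurfaceTexture:
    surfaceTexture.updateTexImage();                             // latch the newest frame into the texture
    renderer.drawFrame(surfaceTexture);                          // draw it onto whatever EGL surface is current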
ABVJE_UI_Android/src/jp/agentec/abook/abv/ui/home/helper/VideoEncoder.java (new file, 0 → 100644, view file @ d8023f43)
(This diff is collapsed in the page and its contents are not shown.)
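The only thing the diff shows about this collapsed class is its call site in ChatWebViewActivity: new VideoEncoder().changeResolution(file) returns the path of the re-encoded file. Given the helpers added in this commit, changeResolution presumably follows the standard decode-render-encode pattern: demux with MediaExtractor, decode onto an output surface, redraw each frame into the encoder's InputSurface via TextureRender, and mux the encoded stream with MediaMuxer. The skeleton below is a structural sketch under that assumption only; the method name, target resolution, and bit rate are placeholders, the frame loop is summarized in comments, and java.io / android.media imports are omitted.

    // Structural sketch (assumption), not the collapsed implementation.
    public String changeResolutionSketch(File input) throws IOException {
        String outputPath = new File(input.getParent(), "encoded_" + input.getName()).getAbsolutePath();

        // 1. Demux the source and select its video track.
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(input.getAbsolutePath());
        int videoTrack = 0; // placeholder: pick the track whose MIME type starts with "video/"
        extractor.selectTrack(videoTrack);
        MediaFormat inputFormat = extractor.getTrackFormat(videoTrack);

        // 2. Configure an H.264 encoder with surface input at the target resolution (placeholder values).
        MediaFormat outputFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
        MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        InputSurface inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();

        // 3. Decode onto a SurfaceTexture wrapped by the (collapsed) output-surface helper,
        //    which uses TextureRender to redraw each decoded frame into the encoder's input surface:
        // OutputSurface outputSurface = new OutputSurface();                 // assumed helper
        // MediaCodec decoder = MediaCodec.createDecoderByType(
        //         inputFormat.getString(MediaFormat.KEY_MIME));
        // decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
        // decoder.start();

        // 4. Mux the encoded output into an MP4 file.
        MediaMuxer muxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        // 5. Loop: feed extractor samples to the decoder; for each decoded frame,
        //    await and redraw it, set the presentation time on inputSurface, swapBuffers()
        //    to push it into the encoder, and drain the encoder into the muxer.
        //    Finish with encoder.signalEndOfInputStream(), then stop and release everything.

        return outputPath;
    }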