Commit b2e86c83 by Lee Munkyeong

Merge branch 'communication/develop' of…

Merge branch 'communication/develop' of https://gitlab.agentec.jp/abook_android/abook_check into communication/feature/finish_all_collaboration
parents d055a103 a50b86be
......@@ -124,7 +124,7 @@ public class OperationDao extends AbstractDao {
* @return all operations joined with their flagged content IDs, newest first
*/
public List<OperationDto> getAllOperation() {
return rawQueryGetDtoList("select tp.*, rpc.content_id from t_operation AS tp left outer join r_operation_content AS rpc on tp.operation_id = rpc.operation_id ORDER BY operation_id DESC", null, OperationDto.class);
return rawQueryGetDtoList("select tp.*, rpc.content_id from t_operation AS tp left outer join r_operation_content AS rpc on tp.operation_id = rpc.operation_id AND rpc.operation_content_flg = 1 ORDER BY operation_id DESC", null, OperationDto.class);
}
public List<OperationDto> getLatestOperations() {
......
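The new predicate sits in the join's ON clause rather than a WHERE clause, which matters for a LEFT OUTER JOIN: operations with no flagged content row are still returned, just with a NULL content_id. A sketch of the distinction, reusing the aliases from the query above:

// In ON (as committed): unmatched operations survive with content_id = NULL.
String filterInOn =
        "select tp.*, rpc.content_id from t_operation AS tp " +
        "left outer join r_operation_content AS rpc " +
        "on tp.operation_id = rpc.operation_id and rpc.operation_content_flg = 1 " +
        "ORDER BY operation_id DESC";

// In WHERE (hypothetical alternative): NULL rpc rows fail the predicate, so
// the outer join silently degrades to an inner join and drops those operations.
String filterInWhere =
        "select tp.*, rpc.content_id from t_operation AS tp " +
        "left outer join r_operation_content AS rpc on tp.operation_id = rpc.operation_id " +
        "where rpc.operation_content_flg = 1 ORDER BY operation_id DESC";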
Subproject commit e23b43b521be6cebe40acc0857528aa01fa9de2a
Subproject commit 76f2642396962fb9b6e8bb35a2aaa8a9949075d0
......@@ -1478,8 +1478,8 @@
<string name="msg_error_chat_room_name_too_long">ルーム名は文字列20字以内に入力してください。</string>
<string name="msg_error_chat_name_has_invalid_character">特殊文字 ;/?:@&amp;=+$,-_.!~*\'()#\\\"` はルーム名に含めることができません。</string>
<string name="msg_error_chat_room_sc_forbidden">認証に失敗しました。再度ログインが必要です。</string>
<string name="msg_error_already_exist_same_room">同じルームが既に存在します。</string>
<string name="msg_confirm_send_host_change">ホスト権限を渡しますか?</string>
<string name="msg_error_already_exist_same_room">同じルームが既に存在します。\n入室しますか</string>
<string name="msg_confirm_send_host_change">ホスト権限変更の依頼があります。変更しますか?</string>
<string name="msg_confirm_share_image">キャプチャーした写真を共有しますか?</string>
<string name="msg_invite_collaboration">協業に招待されました。</string>
<!-- Continuous work -->
......
......@@ -237,7 +237,7 @@
<string name="L120">서버와의 통신에 실패하였습니다.</string>
<string name="L121">다시 시작할 수 없습니다.</string>
<string name="L122">현재 이 기능은 Android 버전 앱에서 \n 대응하고 있지 않습니다.</string>
<string name="L123">표시가 제한된 컨츠 입니다.\n로그인 패스워드를 입력해 주세요.</string>
<string name="L123">표시가 제한된 컨츠 입니다.\n로그인 패스워드를 입력해 주세요.</string>
<string name="L124">시스템에러가 발생하였습니다.앱을 리로드합니다.</string>
<string name="C_E_SYSTEM_0001">예기치 않은 오류가 발생하였습니다.</string>
......@@ -1485,7 +1485,7 @@
<string name="msg_error_chat_name_has_invalid_character">특수문자 ;/?:@&amp;=+$,-_.!~*\'()#\\\"` 는 방 제목에 포함될 수 없습니다.</string>
<string name="msg_error_chat_room_sc_forbidden">사용자 정보를 확인할 수 없습니다. 다시 로그인하시기 바랍니다.</string>
<string name="msg_error_all_process_delete">모두 삭제 송신에 실패하였습니다.</string>
<string name="msg_error_already_exist_same_room">동일한 채팅방이 이미 존재합니다.</string>
<string name="msg_error_already_exist_same_room">동일한 채팅방이 이미 존재합니다.\n 입장하시겠습니까?</string>
<string name="msg_ozd_file_could_not_opened">장표 파일을 열 수 없습니다.</string>
<string name="msg_confirm_send_host_change">방장 권한을 전달 받으시겠습니까?</string>
<string name="msg_confirm_share_image">캡쳐한 사진을 공유 하시겠습니까?</string>
......
......@@ -1482,7 +1482,7 @@
<string name="msg_error_chat_name_has_invalid_character">The character ;/?:@&amp;=+$,-_.!~*\'()#\\\"` cannot be included in the roomname.</string>
<string name="msg_error_chat_room_sc_forbidden">Failed to authenticate. Please login again.</string>
<string name="msg_error_all_process_delete">Failed to send all deletes.</string>
<string name="msg_error_already_exist_same_room">Already exist same room.</string>
<string name="msg_error_already_exist_same_room">Already exist same room.\n do you want enter exist room?</string>
<string name="msg_ozd_file_could_not_opened">Report file could not opened.</string>
<string name="msg_confirm_send_host_change">Do you want to receive host permissions?</string>
<string name="msg_confirm_share_image">Do you want to share captured image?</string>
......
......@@ -3,7 +3,7 @@
-dontskipnonpubliclibraryclasses
-dontpreverify
-verbose
-optimizations !code/simplification/arithmetic,!field/*,!class/merging/*
-optimizations !code/simplification/arithmetic,!field/*,!class/merging/*,!code/removal/advanced,!method/inlining/short,!method/inlining/unique,!method/removal/*,!method/marking/*
-keep public class * extends android.app.Activity
-keep public class * extends android.app.Application
......
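The added exclusions (!method/removal/*, !method/inlining/*, !method/marking/*, !code/removal/advanced) plausibly protect entry points that are reached only at runtime, such as the @JavascriptInterface methods introduced later in this commit; a hypothetical illustration of what those optimizations could otherwise break:

// Illustration only: no Java code calls this method, so an optimizer allowed
// to remove or inline "unused" methods could strip it, breaking the page
// script that reaches it through the WebView bridge at runtime.
public class HostRequestBridge {
    @android.webkit.JavascriptInterface
    public void setHostRequestFlg(int flg) {
        // invoked only as window.android.setHostRequestFlg(1) from JavaScript
    }
}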
......@@ -20,7 +20,7 @@
<shape android:shape="rectangle">
<solid android:color="@android:color/white" />
<corners android:radius="1dp" />
<stroke android:width="1dp" android:color="@android:color/white" />
<stroke android:width="0.5dp" android:color="@android:color/black" />
<padding
android:bottom="7dp"
android:left="7dp"
......
......@@ -11,9 +11,8 @@
android:orientation="horizontal">
<LinearLayout
android:layout_width="match_parent"
android:layout_width="250dp"
android:layout_height="wrap_content"
android:layout_weight="10"
android:orientation="vertical">
<LinearLayout
......
......@@ -127,5 +127,7 @@
<ListView
android:id="@+id/lv_push_message"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
android:layout_height="wrap_content"
android:divider="@color/line"
android:dividerHeight="1px"/>
</LinearLayout>
\ No newline at end of file
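For reference, the same divider can be applied from code; a minimal equivalent of the two new attributes, assuming an Activity that has inflated this layout (uses android.graphics.drawable.ColorDrawable and the existing @color/line resource):

ListView list = (ListView) findViewById(R.id.lv_push_message);
list.setDivider(new ColorDrawable(getResources().getColor(R.color.line)));
list.setDividerHeight(1); // pixels, matching the layout's 1px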
......@@ -12,7 +12,7 @@
android:background="@color/app_color"
android:minHeight="50dp" >
<Button
<ImageButton
android:id="@+id/closeBtn"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
......@@ -22,7 +22,7 @@
android:background="@drawable/btn_close"
android:contentDescription="@string/cont_desc" />
<Button
<ImageButton
android:id="@+id/btn_link_original_back"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
......@@ -79,7 +79,7 @@
android:background="@drawable/btn_remote_start"
android:visibility="gone" />
<Button
<ImageButton
android:id="@+id/btn_history_list"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
......@@ -95,7 +95,7 @@
android:layout_centerVertical="true"
android:layout_toLeftOf="@+id/menuLayout"
android:layout_marginRight="10dp" >
<Button
<ImageButton
android:id="@+id/btnWebBack"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
......@@ -103,7 +103,7 @@
android:background="@drawable/btn_history_back"
android:contentDescription="@string/cont_desc" />
<Button
<ImageButton
android:id="@+id/btnWebForward"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
......
......@@ -8,7 +8,7 @@
<Button
android:id="@+id/btn_show_list_view"
android:layout_width="wrap_content"
android:layout_width="60dp"
android:layout_height="wrap_content"
android:background="@drawable/btn_show_list" />
......
......@@ -203,7 +203,7 @@
<ToggleButton
android:id="@+id/tbtn_slide"
android:layout_width="wrap_content"
android:layout_width="60dp"
android:layout_height="wrap_content"
android:layout_alignParentLeft="true"
android:layout_below="@+id/RelativeLayout2"
......@@ -215,7 +215,7 @@
<ToggleButton
android:id="@+id/tbtn_marking"
android:layout_width="wrap_content"
android:layout_width="60dp"
android:layout_height="wrap_content"
android:layout_alignParentLeft="true"
android:layout_below="@+id/tbtn_slide"
......
......@@ -170,7 +170,8 @@
android:layout_height="30dp"
android:layout_gravity="center"
android:background="@drawable/operation_radius_frame"
android:text="@string/save" />
android:text="@string/save"
android:textColor="@android:color/white"/>
<Button
android:id="@+id/pause_save"
......@@ -179,7 +180,8 @@
android:layout_gravity="center"
android:background="@drawable/operation_radius_frame"
android:text="@string/pause"
android:visibility="gone" />
android:visibility="gone"
android:textColor="@android:color/white"/>
<Button
android:id="@+id/restart_save"
......@@ -188,7 +190,8 @@
android:layout_gravity="center"
android:background="@drawable/operation_radius_frame"
android:text="@string/restart"
android:visibility="gone" />
android:visibility="gone"
android:textColor="@android:color/white"/>
<Button
android:id="@+id/open_content"
......@@ -197,7 +200,8 @@
android:layout_gravity="center"
android:background="@drawable/operation_radius_frame"
android:text="@string/open"
android:visibility="gone" />
android:visibility="gone"
android:textColor="@android:color/white"/>
<Button
android:id="@+id/delete_content"
......@@ -229,7 +233,8 @@
android:layout_marginTop="18dp"
android:background="@drawable/operation_radius_frame"
android:text="@string/content_update"
android:visibility="gone" />
android:visibility="gone"
android:textColor="@android:color/white" />
</LinearLayout>
......
......@@ -50,7 +50,7 @@
android:text="@string/save"
android:visibility="gone" />
<Button
<ImageButton
android:id="@+id/memo_delete"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
......
......@@ -38,6 +38,8 @@
<ListView
android:id="@+id/lv_operation_select"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
android:layout_height="wrap_content"
android:divider="@color/line"
android:dividerHeight="1px"/>
</LinearLayout>
\ No newline at end of file
......@@ -126,5 +126,7 @@
<ListView
android:id="@+id/lv_push_message"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
android:layout_height="wrap_content"
android:divider="@color/line"
android:dividerHeight="1px"/>
</LinearLayout>
\ No newline at end of file
......@@ -234,7 +234,9 @@
android:id="@+id/listView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:visibility="gone" />
android:visibility="gone"
android:divider="@color/line"
android:dividerHeight="1px"/>
</LinearLayout>
\ No newline at end of file
......@@ -69,6 +69,7 @@ public class ABVFcmListenerService extends FirebaseMessagingService {
Integer insertId = 0;
Integer messageType = 0;
Integer messageId = 0;
String roomType = "";
Log.d(TAG,"tempMsg : "+ tempMsg);
......@@ -97,6 +98,7 @@ public class ABVFcmListenerService extends FirebaseMessagingService {
}
if (pushMsg.contains(INVITE_COLLABORATION)) {
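// NOTE: assumes every collaboration-invite payload now carries a numeric
// "roomType" field; org.json's getInt() throws JSONException if it is absent.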
roomType = Integer.toString(json.getInt("roomType"));
String[] inviteMessage = pushMsg.split("<::split>");
tempMsg = getString(R.string.msg_invite_collaboration);
Intent pushMsgDialog = new Intent(ABVFcmListenerService.this, ShowPushMessageDailogActivity.class);
......@@ -107,6 +109,7 @@ public class ABVFcmListenerService extends FirebaseMessagingService {
pushMsgDialog.putExtra(AppDefType.ChatPushMessageKey.pushSendLoginId, pushSendLoginId); // sendLoginId
pushMsgDialog.putExtra(AppDefType.ChatPushMessageKey.pushSendDate, pushSendDate);
pushMsgDialog.putExtra(AppDefType.ChatPushMessageKey.collaborationType, inviteMessage[1]);
pushMsgDialog.putExtra(AppDefType.ChatPushMessageKey.roomType, roomType);
startActivity(pushMsgDialog);
return;
}
......
......@@ -1029,8 +1029,12 @@ public abstract class ABVAuthenticatedActivity extends ABVActivity implements Co
mPushMessageSendDialog.findViewById(R.id.back_btn).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showOperationSelectDialog();
mPushMessageSendDialog.dismiss();
if (mFixPushMessageListView.getVisibility() == View.VISIBLE) { // canned-message list is showing
showSendPushMessgeView(true);
} else { // send view is showing
showOperationSelectDialog();
mPushMessageSendDialog.dismiss();
}
}
});
......@@ -1086,8 +1090,6 @@ public abstract class ABVAuthenticatedActivity extends ABVActivity implements Co
private void showSendPushMessgeView(boolean sendPushMessageViewFlg) {
if (sendPushMessageViewFlg) {
mFixPushMessageListView.setVisibility(View.GONE);
mPushMessageSendDialog.findViewById(R.id.back_btn).setVisibility(View.GONE);
mPushMessageSendDialog.findViewById(R.id.push_send_layout).setVisibility(View.VISIBLE);
mPushMessageSendDialog.findViewById(R.id.close_btn).setVisibility(View.VISIBLE);
EditText messageEditText = (EditText) mPushMessageSendDialog.findViewById(R.id.message);
......
......@@ -71,6 +71,7 @@ import jp.agentec.abook.abv.ui.common.util.ABVToastUtil;
import jp.agentec.abook.abv.ui.common.util.AlertDialogUtil;
import jp.agentec.abook.abv.ui.common.util.PatternStringUtil;
import jp.agentec.abook.abv.ui.common.view.ABVPopupListWindow;
import jp.agentec.abook.abv.ui.home.activity.ChatWebViewActivity;
import jp.agentec.abook.abv.ui.home.helper.ABookCheckWebViewHelper;
import jp.agentec.abook.abv.ui.home.helper.ABookPermissionHelper;
import jp.agentec.abook.abv.ui.home.helper.ActivityHandlingHelper;
......@@ -147,11 +148,17 @@ public abstract class ABVContentViewActivity extends ABVAuthenticatedActivity {
protected String mEnablePhotoEdit; // edit-permission parameter received from the Web at first file upload. 0: allow editing, 1: disallow editing.
protected PhotoEditActivity mPhotoEditDialog;
protected boolean isCollaboration = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ChatWebViewActivity chatWebViewActivity = ActivityHandlingHelper.getInstance().getActivity(ChatWebViewActivity.class);
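// NOTE: Activity.isInPictureInPictureMode() only exists from API 24 (N);
// on older devices this call would fail with NoSuchMethodError.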
if (chatWebViewActivity != null && chatWebViewActivity.isInPictureInPictureMode()) {
isCollaboration = true;
}
Intent intent = getIntent();
contentId = intent.getLongExtra(ABookKeys.CONTENT_ID, 0);
contentType = intent.getStringExtra(ABookKeys.CONTENT_TYPE);
......@@ -462,7 +469,12 @@ public abstract class ABVContentViewActivity extends ABVAuthenticatedActivity {
params.addRule(RelativeLayout.CENTER_VERTICAL);
params.rightMargin = (int) (getResources().getDisplayMetrics().density * 5);
subMenuBtn.setLayoutParams(params);
exitMeetingBtn.setVisibility(View.VISIBLE);
if (isCollaboration) {
exitMeetingBtn.setVisibility(View.GONE);
} else {
exitMeetingBtn.setVisibility(View.VISIBLE);
}
// if (helpButton != null && helpButton.getVisibility() == View.VISIBLE) {
// helpButton.setLayoutParams(params);
......
......@@ -115,7 +115,9 @@ public class ShowPushMessageDailogActivity extends ABVUIActivity {
final ABookAlertDialog alertDialog = AlertDialogUtil.createAlertDialog(ShowPushMessageDailogActivity.this, getRString(R.string.app_name), getIntent().getExtras().getString(PushMessageKey.message));
Bundle extras = getIntent().getExtras();
Long roomId = extras.getLong(AppDefType.ChatPushMessageKey.roomId, 0);
if (roomId != 0) {
if (roomId == 0) {
alertDialog.setPositiveButton(R.string.ok, null);
} else {
alertDialog.setPositiveButton(R.string.move, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
......@@ -127,25 +129,26 @@ public class ShowPushMessageDailogActivity extends ABVUIActivity {
String pushSendLoginId = extras.getString(AppDefType.ChatPushMessageKey.pushSendLoginId);
long pushSendDate = extras.getLong(AppDefType.ChatPushMessageKey.pushSendDate);
String collaborationType = extras.getString(AppDefType.ChatPushMessageKey.collaborationType);
String roomType = extras.getString(AppDefType.ChatPushMessageKey.roomType);
if (roomId > 0 && !StringUtil.isNullOrEmpty(roomName)) {
List<PushMessageDto> pushMessageDtoList = AbstractLogic.getLogic(PushMessageLogic.class).getAllPushMessageList();
Logger.d("pushSendDate","pushSendDate : " + pushSendDate);
Logger.d("pushSendDate", "pushSendDate : " + pushSendDate);
String pushSendDateDate = DateTimeUtil.toString(new Date(pushSendDate), DateTimeFormat.yyyyMMddHHmmssSSS_none);
Logger.d("pushSendDate","pushSendDateDate : " + pushSendDateDate);
Logger.d("pushSendDate", "pushSendDateDate : " + pushSendDateDate);
String pushSendDateDate2 = DateTimeUtil.toString(DateTimeUtil.toDate(pushSendDateDate, DateTimeFormat.yyyyMMddHHmmssSSS_none), DateTimeFormat.yyyyMMddHHmmssSSS_none);
Logger.d("pushSendDate","pushSendDateDate2 : " + pushSendDateDate2);
Logger.d("pushSendDate", "pushSendDateDate2 : " + pushSendDateDate2);
for (int i=0; i < pushMessageDtoList.size() - 1; i++) {
for (int i = 0; i < pushMessageDtoList.size() - 1; i++) {
String tempDate = DateTimeUtil.toString(pushMessageDtoList.get(i).pushSendDate, DateTimeFormat.yyyyMMddHHmmssSSS_none);
Logger.d("tempDate","date : " + tempDate);
Logger.d("tempDate", "date : " + tempDate);
}
if (StringUtil.isNullOrEmpty(collaborationType)) {
ActivityHandlingHelper.getInstance().startChatWebViewActivity(roomId, roomName);
} else {
ActivityHandlingHelper.getInstance().startChatWebViewActivityWithCollaboration(roomId, roomName, collaborationType);
ActivityHandlingHelper.getInstance().startChatWebViewActivityWithCollaboration(roomId, roomName, collaborationType, roomType);
}
}
}
......@@ -159,8 +162,6 @@ public class ShowPushMessageDailogActivity extends ABVUIActivity {
finish();
}
});
} else {
alertDialog.setPositiveButton(R.string.ok, null);
}
alertDialog.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
......
......@@ -144,5 +144,6 @@ public interface AppDefType {
String pushSendLoginId = "pushSendLoginId";
String pushSendDate = "pushSendDate";
String collaborationType = "collaborationType";
String roomType = "roomType";
}
}
......@@ -31,7 +31,7 @@ public class ABVBatchSyncView extends ProgressDialog {
private boolean isDestroy = false;
public ABVBatchSyncView(Context context) {
super(context);
super(context, R.style.MyDialogTheme);
mOperationListActivity = (OperationListActivity)context;
init();
isDestroy = false;
......
......@@ -14,6 +14,7 @@ import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.ConnectivityManager;
......@@ -26,6 +27,8 @@ import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.BaseColumns;
import android.provider.MediaStore;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.KeyEvent;
......@@ -101,6 +104,7 @@ import jp.agentec.abook.abv.ui.common.dialog.ABookAlertDialog;
import jp.agentec.abook.abv.ui.common.util.ABVToastUtil;
import jp.agentec.abook.abv.ui.common.util.AlertDialogUtil;
import jp.agentec.abook.abv.ui.home.helper.ActivityHandlingHelper;
import jp.agentec.abook.abv.ui.home.helper.VideoEncoder;
import jp.agentec.abook.abv.ui.home.view.FullscreenableChromeClient;
import jp.agentec.abook.abv.ui.viewer.activity.CommunicationWebViewActivity;
import jp.agentec.abook.abv.ui.viewer.activity.ParentWebViewActivity;
......@@ -120,7 +124,6 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
private final String TAG = "ChatWebViewActivity";
private final String NETWORK_ERROR_PLACE_HOLDER = "file:///android_asset/chat/public_new/chat.html";
//private final String NETWORK_ERROR_PLACE_HOLDER = "file:///android_asset/chat/public/networkError.html";
private final String CHAT_PAGE_URL = "file:///android_asset/chat/public_new/chat.html";
private final String CHAT_ROOM_PAGE_URL = "file:///android_asset/chat/public_new/chat_room.html";
private final String COLLABORATION_PAGE_URL = "file:///android_asset/chat/public_new/collaboration.html";
......@@ -144,6 +147,8 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
private String mSkey;
private Integer joinMeetingId;
private boolean isPIP;
private String encodedFilePath;
private Uri encodedVideoPath;
// 0: create collaboration, 1: join collaboration
private Integer collaborationJoinFlg = 0;
......@@ -158,6 +163,9 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
private CommunicationLogic communicationLogic = AbstractLogic.getLogic(CommunicationLogic.class);
private int mCollaborationType;
private int mHostRequestFlg = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
......@@ -183,6 +191,7 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
sid = intent.getStringExtra("sid");
roomId = intent.getLongExtra("roomId", 0);
roomName = intent.getStringExtra("roomName");
roomType = intent.getStringExtra("roomType");
loginId = intent.getStringExtra("loginId");
shopName = intent.getStringExtra("shopName");
collaborationType = intent.getStringExtra("collaborationType");
......@@ -281,7 +290,9 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
e.printStackTrace();
}
Integer integerRoomId = Integer.parseInt(roomId.toString());
roomType = communicationLogic.getChatRoom(integerRoomId).type.toString();
if (StringUtil.isNullOrEmpty(roomType)) {
roomType = communicationLogic.getChatRoom(integerRoomId).type.toString();
}
if (StringUtil.isNullOrEmpty(collaborationType)) {
String parameterData = "sid=" + sid + "&loginId=" + loginId + "&shopName=" + shopName + "&roomId=" + roomId + "&roomName=" + roomName + fixedParam;
mChatWebView.postUrl(CHAT_ROOM_PAGE_URL, parameterData.getBytes());
......@@ -421,6 +432,13 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
message = getString(R.string.msg_error_already_exist_same_room);
} else if (message.equals("방장 권한을 전달 받으시겠습니까?")){
message = getString(R.string.msg_confirm_send_host_change);
if (mHostRequestFlg == 1) {
mChatWebView.loadUrl("javascript:penOff();");
mChatWebView.loadUrl("javascript:$('#hostRequestModal').modal('hide');");
mChatWebView.loadUrl("javascript:$('#userNameCardInCollaboration').modal('hide');");
result.confirm();
return true;
}
} else if (message.equals("캡쳐한 사진을 공유 하시겠습니까?")) {
message = getString(R.string.msg_confirm_share_image);
}
......@@ -539,7 +557,7 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
File file = new File(filePath);
NetworkTask networkTask = new NetworkTask(url, filePath);
networkTask.execute();
} else if (url.contains("/file/getImage")) {
} else if (url.contains("/file/getImage") && !url.contains(".mp4")) {
Uri uri = Uri.parse(url);
String fileName = uri.getQueryParameter("fileName");
String filePath = getFilesDir().getAbsolutePath() + "/" + fileName;
......@@ -785,11 +803,80 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
if (mUploadMessage == null) {
return;
}
mUploadMessage.onReceiveValue(result);
mChatWebView.post(new Runnable() {
@Override
public void run() {
mChatWebView.loadUrl("javascript:CHAT_UI.showLoadingIndicator();");
}
});
if (dataUri != null) {
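// NOTE: assumes query() returns a non-null cursor exposing a "_data" column;
// MediaStore's DATA column is deprecated from API 29, where resolving a real
// file path this way may no longer work.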
Cursor cursor = getContentResolver().query(dataUri, null, null, null, null);
cursor.moveToNext();
final String filePath = cursor.getString(cursor.getColumnIndex("_data"));
cursor.close();
Runnable r = new Runnable() {
@Override
public void run() {
try {
//CallBack
Runnable callBack = new Runnable() {
@Override
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
mChatWebView.loadUrl("javascript:CHAT_UI.videoEncodeEnd('" +encodedFilePath + "')");
}
});
}
};
Context c = getApplicationContext();
File file = new File(filePath);
String encodedPath = new VideoEncoder().changeResolution(file, callBack);
encodedFilePath = encodedPath;
} catch (Throwable throwable) {
mChatWebView.loadUrl("javascript:CHAT_UI.videoEncodeFail();)");
}
}
};
r.run();
mUploadMessage.onReceiveValue(null);
}
}
mUploadMessage = null;
}
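// Usage note: VideoEncoder.changeResolution() returns the output path right
// away and transcodes on a worker thread it starts internally; the callback
// Runnable fires when encoding finishes, which is why the result reaches the
// page asynchronously through CHAT_UI.videoEncodeEnd() above.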
public static Uri getImageContentUri(Context context, File file) {
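// NOTE: despite the name, this helper looks the file up by path in the Images
// table yet builds and inserts video content URIs; it appears adapted from an
// image-oriented variant.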
if (!file.exists()) {
return null;
}
String filePath = file.getAbsolutePath();
Cursor cursor = context.getContentResolver().query(
MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
new String[] {BaseColumns._ID},
MediaStore.MediaColumns.DATA + "=? ",
new String[] { filePath }, null);
if (cursor != null && cursor.moveToFirst()) {
int id = cursor.getInt(cursor
.getColumnIndex(BaseColumns._ID));
Uri baseUri = Uri.parse("content://media/external/video/media");
return Uri.withAppendedPath(baseUri, "" + id);
} else {
if (!file.exists()) { return null; }
ContentValues values = new ContentValues();
values.put(MediaStore.MediaColumns.DATA, filePath);
return context.getContentResolver().insert(
MediaStore.Video.Media.EXTERNAL_CONTENT_URI,
values);
}
}
/**
* Checks whether an unread push message exists.
* @param pushMessageDtoList the list to check
......@@ -1419,9 +1506,6 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
public int createContentView() {
Integer meetingId = null;
meetingManager.close();
Stack<ABVAuthenticatedActivity> stack = activityHandlingHelper.getCurrentActivityStack();
activityHandlingHelper.removeContentViewActivity(activityHandlingHelper.getContentViewActivity());
Stack<ABVAuthenticatedActivity> stack2 = activityHandlingHelper.getCurrentActivityStack();
try {
connectMeetingServer();
List<MeetingDto> meetingList= meetingManager.getMeetingList(mSkey);
......@@ -1443,7 +1527,6 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
@JavascriptInterface
public void startContentView() {
activityHandlingHelper.removeContentViewActivity(activityHandlingHelper.getContentViewActivity());
meetingManager.close();
try {
connectMeetingServer();
......@@ -1459,8 +1542,6 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
@JavascriptInterface
public void joinMeetingRoom(int newMeetingId) {
activityHandlingHelper.removeContentViewActivity(activityHandlingHelper.getContentViewActivity());
finishBeforeContentListActivity();
meetingManager.close();
try {
connectMeetingServer();
......@@ -1506,6 +1587,24 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
public int getAndroidVersion() {
return Build.VERSION.SDK_INT;
}
@JavascriptInterface
public void removeEncodedVideo(String filePath) {
File encodedVideo = new File(filePath);
if (encodedVideo.exists()) {
encodedVideo.delete();
}
}
@JavascriptInterface
public void setHostRequestFlg(int hostRequestFlg) {
mHostRequestFlg = hostRequestFlg;
}
@JavascriptInterface
public int getHostRequestFlg() {
return mHostRequestFlg;
}
}
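These @JavascriptInterface methods are callable from page script only once their enclosing object is registered on the WebView; a minimal sketch (the interface class name and the "android" alias are assumptions, since the registration site is not part of this diff):

mChatWebView.getSettings().setJavaScriptEnabled(true);
mChatWebView.addJavascriptInterface(new JsInterface(), "android"); // hypothetical names
// The page can then call, e.g.:
//   window.android.setHostRequestFlg(1);
//   var flg = window.android.getHostRequestFlg();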
/**
......@@ -1555,7 +1654,6 @@ public class ChatWebViewActivity extends CommunicationWebViewActivity {
targetActivity = stack.get(stack.size()-2);
}
}
Intent intent = new Intent();
intent.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
intent.setClass(targetActivity, OperationRelatedContentActivity.class);
......
......@@ -96,7 +96,7 @@ public class OperationRelatedContentActivity extends ABVUIActivity {
mAllSaveButton.setVisibility(View.GONE);
setOnButtonEvent();
settingBottomToolbar();
refreshCollaborationUI();
}
// Configure the bottom toolbar
......@@ -506,25 +506,39 @@ public class OperationRelatedContentActivity extends ABVUIActivity {
});
}
private void refreshCollaborationUI() {
if (isCollaboration) {
activityHandlingHelper.removeContentViewActivity(activityHandlingHelper.getContentViewActivity());
hideBottomToolbar();
if (isCollaborationOwner) {
closeProgressPopup();
showContentList();
} else {
showProgressPopup();
hideContentList();
}
} else {
closeProgressPopup();
showBottomToolbar();
showContentList();
}
}
@Override
public void onResume() {
Logger.i(TAG, "onResume:start");
super.onResume();
if (isCollaboration) {
hideBottomToolbar();
if (!isCollaborationOwner) {
showProgressPopup();
hideContentList();
}
} else {
showBottomToolbar();
showContentList();
}
showOperationRelatedContentList();
}
@Override
protected void onNewIntent(Intent intent) {
isCollaboration = intent.getBooleanExtra("isCollaboration", false);
isCollaborationOwner = intent.getBooleanExtra("isCollaborationOwner", false);
refreshCollaborationUI();
super.onNewIntent(intent);
}
// Navigate to the settings screen
public void onClickSetting(View v) {
showSetting();
......
......@@ -398,9 +398,17 @@ public class ActivityHandlingHelper extends ABookHelper implements RemoteObserve
}
}
}
if (!StringUtil.equalsAny(contentType,
ContentJSON.KEY_HTML_TYPE, ContentJSON.KEY_LINK_TYPE, ContentJSON.KEY_ENQUETE_TYPE, ContentJSON.KEY_EXAM_TYPE,
ContentJSON.KEY_PANO_MOVIE_TYPE, ContentJSON.KEY_PANO_IMAGE_TYPE, ContentJSON.KEY_OBJECTVR_TYPE)) {
if (!StringUtil.equalsAny(
contentType,
ContentJSON.KEY_HTML_TYPE,
ContentJSON.KEY_LINK_TYPE,
ContentJSON.KEY_ENQUETE_TYPE,
ContentJSON.KEY_EXAM_TYPE,
ContentJSON.KEY_PANO_MOVIE_TYPE,
ContentJSON.KEY_PANO_IMAGE_TYPE,
ContentJSON.KEY_OBJECTVR_TYPE)
) {
context.startActivity(intent);
}
}
......@@ -1926,7 +1934,7 @@ public class ActivityHandlingHelper extends ABookHelper implements RemoteObserve
this.previousOfSettingActivity2 = activity;
}
public void startChatWebViewActivityWithCollaboration(Long roomId, String roomName, String collaborationType) {
public void startChatWebViewActivityWithCollaboration(Long roomId, String roomName, String collaborationType, String roomType) {
String className = ChatWebViewActivity.class.getName();
boolean isNormalSize = (mContext.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) == Configuration.SCREENLAYOUT_SIZE_NORMAL;
Intent intent = new Intent();
......@@ -1940,7 +1948,7 @@ public class ActivityHandlingHelper extends ABookHelper implements RemoteObserve
intent.putExtra("shopName", shopName);
intent.putExtra("roomId", roomId);
intent.putExtra("roomName", roomName);
intent.putExtra("roomType", roomType);
intent.setClassName(mContext.getPackageName(), className);
Activity activity2 = null;
......
package jp.agentec.abook.abv.ui.home.helper;
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.opengl.EGL14;
import android.opengl.EGLExt;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;
/**
* Holds state associated with a Surface used for MediaCodec encoder input.
* <p>
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
* to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
* to the video encoder.
*/
class InputSurface {
private static final String TAG = "InputSurface";
private static final boolean VERBOSE = true;
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private static final int EGL_OPENGL_ES2_BIT = 4;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private Surface mSurface;
/**
* Creates an InputSurface from a Surface.
*/
public InputSurface(Surface surface) {
if (surface == null) {
throw new NullPointerException();
}
mSurface = surface;
eglSetup();
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
mEGLDisplay = null;
throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for a recordable window surface (EGL_RECORDABLE_ANDROID) and
// OpenGL ES 2.0. We want enough RGB bits to be able to tell if the frame is
// reasonable.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
attrib_list, 0);
checkEglError("eglCreateContext");
if (mEGLContext == null) {
throw new RuntimeException("null context");
}
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
if (mEGLSurface == null) {
throw new RuntimeException("surface was null");
}
}
/**
* Discard all resources held by this class, notably the EGL context. Also releases the
* Surface that was passed to our constructor.
*/
public void release() {
if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
// Clear the current context and surface to ensure they are discarded immediately.
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
}
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
//EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
// null everything out so future attempts to use this object will cause an NPE
mEGLDisplay = null;
mEGLContext = null;
mEGLSurface = null;
mSurface = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*/
public boolean swapBuffers() {
return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
}
/**
* Returns the Surface that the MediaCodec receives buffers from.
*/
public Surface getSurface() {
return mSurface;
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
}
/**
* Checks for EGL errors.
*/
private void checkEglError(String msg) {
boolean failed = false;
int error;
while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
failed = true;
}
if (failed) {
throw new RuntimeException("EGL error encountered (see log)");
}
}
}
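For orientation, this is roughly how VideoEncoder (below) drives InputSurface: the encoder is configured for surface input, its input Surface is wrapped, and each GL-rendered frame is time-stamped and swapped into the codec. A condensed sketch assuming an H.264 encoder; the GL drawing itself is elided:

MediaFormat fmt = MediaFormat.createVideoFormat("video/avc", 854, 480);
fmt.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
fmt.setInteger(MediaFormat.KEY_BIT_RATE, 2500 * 1024);
fmt.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
fmt.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
encoder.configure(fmt, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
InputSurface input = new InputSurface(encoder.createInputSurface());
encoder.start();
input.makeCurrent();                 // GL rendering now targets the encoder
for (long frame = 0; frame < 30; frame++) {
    // ... draw one frame with GL ...
    input.setPresentationTime(frame * 1000000000L / 30); // PTS in nanoseconds
    input.swapBuffers();             // submit the frame to the encoder
}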
package jp.agentec.abook.abv.ui.home.helper;
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLES11Ext;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import java.nio.ByteBuffer;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import javax.microedition.khronos.opengles.GL;
import javax.microedition.khronos.opengles.GL10;
/**
* Holds state associated with a Surface used for MediaCodec decoder output.
* <p>
* The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
* and then create a Surface for that SurfaceTexture. The Surface can be passed to
* MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
* texture with updateTexImage, then render the texture with GL to a pbuffer.
* <p>
* The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
* Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
* we just draw it on whatever surface is current.
* <p>
* By default, the Surface will be using a BufferQueue in asynchronous mode, so we
* can potentially drop frames.
*/
class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
private static final String TAG = "OutputSurface";
private static final boolean VERBOSE = true;
private static final int EGL_OPENGL_ES2_BIT = 4;
private EGL10 mEGL;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private SurfaceTexture mSurfaceTexture;
private Surface mSurface;
private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
private boolean mFrameAvailable;
private TextureRender mTextureRender;
private HandlerThread mHandlerThread;
private Handler mHandler;
/**
* Creates an OutputSurface backed by a pbuffer with the specified dimensions. The new
* EGL context and surface will be made current. Creates a Surface that can be passed
* to MediaCodec.configure().
*/
public OutputSurface(int width, int height) {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException();
}
eglSetup(width, height);
makeCurrent();
setup();
}
/**
* Creates an OutputSurface using the current EGL context. Creates a Surface that can be
* passed to MediaCodec.configure().
*/
public OutputSurface() {
setup();
}
/**
* Creates instances of TextureRender and SurfaceTexture, and a Surface associated
* with the SurfaceTexture.
*/
private void setup() {
mTextureRender = new TextureRender();
mTextureRender.surfaceCreated();
// Even if we don't access the SurfaceTexture after the constructor returns, we
// still need to keep a reference to it. The Surface doesn't retain a reference
// at the Java level, so if we don't either then the object can get GCed, which
// causes the native finalizer to run.
// mHandlerThread = new HandlerThread("callback-thread");
// mHandlerThread.start();
// mHandler = new Handler(mHandlerThread.getLooper());
if (VERBOSE) {
Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// This doesn't work if OutputSurface is created on the thread that CTS started for
// these test cases.
//
// The CTS-created thread has a Looper, and the SurfaceTexture constructor will
// create a Handler that uses it. The "frame available" message is delivered
// there, but since we're not a Looper-based thread we'll never see it. For
// this to do anything useful, OutputSurface must be created on a thread without
// a Looper, so that SurfaceTexture uses the main application Looper instead.
//
// Java language note: passing "this" out of a constructor is generally unwise,
// but we should be able to get away with it here.
mSurfaceTexture.setOnFrameAvailableListener(this, mHandler);
mSurface = new Surface(mSurfaceTexture);
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
*/
private void eglSetup(int width, int height) {
mEGL = (EGL10)EGLContext.getEGL();
mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (!mEGL.eglInitialize(mEGLDisplay, null)) {
throw new RuntimeException("unable to initialize EGL10");
}
// Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
// to be able to tell if the frame is reasonable.
int[] attribList = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
throw new RuntimeException("unable to find RGB888+pbuffer EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL10.EGL_NONE
};
mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
attrib_list);
checkEglError("eglCreateContext");
if (mEGLContext == null) {
throw new RuntimeException("null context");
}
// Create a pbuffer surface. By using this for output, we can use glReadPixels
// to test values in the output.
int[] surfaceAttribs = {
EGL10.EGL_WIDTH, width,
EGL10.EGL_HEIGHT, height,
EGL10.EGL_NONE
};
mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
checkEglError("eglCreatePbufferSurface");
if (mEGLSurface == null) {
throw new RuntimeException("surface was null");
}
}
/**
* Discard all resources held by this class, notably the EGL context.
*/
public void release() {
if (mEGL != null) {
if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
// Clear the current context and surface to ensure they are discarded immediately.
mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_CONTEXT);
}
mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
//mEGL.eglTerminate(mEGLDisplay);
}
mSurface.release();
// this causes a bunch of warnings that appear harmless but might confuse someone:
// W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
//mSurfaceTexture.release();
// null everything out so future attempts to use this object will cause an NPE
mEGLDisplay = null;
mEGLContext = null;
mEGLSurface = null;
mEGL = null;
mTextureRender = null;
mSurface = null;
mSurfaceTexture = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
if (mEGL == null) {
throw new RuntimeException("not configured for makeCurrent");
}
checkEglError("before makeCurrent");
if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Returns the Surface that we draw onto.
*/
public Surface getSurface() {
return mSurface;
}
/**
* Replaces the fragment shader.
*/
public void changeFragmentShader(String fragmentShader) {
mTextureRender.changeFragmentShader(fragmentShader);
}
/**
* Latches the next buffer into the texture. Must be called from the thread that created
* the OutputSurface object, after the onFrameAvailable callback has signaled that new
* data is available.
*/
public void awaitNewImage() {
final int TIMEOUT_MS = 2500;
synchronized (mFrameSyncObject) {
while (!mFrameAvailable) {
try {
// Wait for onFrameAvailable() to signal us. Use a timeout to avoid
// stalling the test if it doesn't arrive.
mFrameSyncObject.wait(TIMEOUT_MS);
if (!mFrameAvailable) {
// TODO: if "spurious wakeup", continue while loop
throw new RuntimeException("Surface frame wait timed out");
}
} catch (InterruptedException ie) {
// shouldn't happen
throw new RuntimeException(ie);
}
}
mFrameAvailable = false;
}
// Latch the data.
mTextureRender.checkGlError("before updateTexImage");
mSurfaceTexture.updateTexImage();
}
/**
* Draws the data from SurfaceTexture onto the current EGL surface.
*/
public void drawImage() {
mTextureRender.drawFrame(mSurfaceTexture);
}
@Override
public void onFrameAvailable(SurfaceTexture st) {
if (VERBOSE) {
Log.d(TAG, "new frame available");
}
synchronized (mFrameSyncObject) {
if (mFrameAvailable) {
throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
}
mFrameAvailable = true;
mFrameSyncObject.notifyAll();
}
}
/**
* Checks for EGL errors.
*/
private void checkEglError(String msg) {
boolean failed = false;
int error;
while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
failed = true;
}
if (failed) {
throw new RuntimeException("EGL error encountered (see log)");
}
}
}
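The decoder side mirrors this, matching how VideoEncoder (below) uses OutputSurface; inputFormat here stands for the MediaFormat pulled from the source's video track:

OutputSurface output = new OutputSurface();   // no-arg: shares the current EGL context
MediaCodec decoder = MediaCodec.createDecoderByType("video/avc"); // assuming H.264 input
decoder.configure(inputFormat, output.getSurface(), null, 0);
decoder.start();
// Then, for each frame released with releaseOutputBuffer(index, true):
output.awaitNewImage();  // waits for onFrameAvailable(), then updateTexImage()
output.drawImage();      // renders the latched frame onto the current EGL surface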
package jp.agentec.abook.abv.ui.home.helper;
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
/**
* Code for rendering a texture onto a surface using OpenGL ES 2.0.
*/
class TextureRender {
private static final String TAG = "TextureRender";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private static final String VERTEX_SHADER =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID = -12345;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
public TextureRender() {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public int getTextureId() {
return mTextureID;
}
public void drawFrame(SurfaceTexture st) {
checkGlError("onDrawFrame start");
st.getTransformMatrix(mSTMatrix);
GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
/**
* Initializes GL state. Call this after the EGL surface has been created and made current.
*/
public void surfaceCreated() {
mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
}
/**
* Replaces the fragment shader.
*/
public void changeFragmentShader(String fragmentShader) {
GLES20.glDeleteProgram(mProgram);
mProgram = createProgram(VERTEX_SHADER, fragmentShader);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
if (program == 0) {
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
public void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
}
package jp.agentec.abook.abv.ui.home.helper;
import android.annotation.SuppressLint;
import android.graphics.Bitmap;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMetadataRetriever;
import android.media.MediaMuxer;
import android.view.Surface;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicReference;
public class VideoEncoder {
private static final int TIMEOUT_USEC = 10000;
private static final String OUTPUT_VIDEO_MIME_TYPE = "video/avc";
private static final int OUTPUT_VIDEO_BIT_RATE = 2500 * 1024;
private static final int OUTPUT_VIDEO_FRAME_RATE = 30;
private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 10;
private static final int OUTPUT_VIDEO_COLOR_FORMAT =
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
private static final String OUTPUT_AUDIO_MIME_TYPE = "audio/mp4a-latm";
private static final int OUTPUT_AUDIO_CHANNEL_COUNT = 2;
private static final int OUTPUT_AUDIO_BIT_RATE = 128 * 1024;
private static final int OUTPUT_AUDIO_AAC_PROFILE =
MediaCodecInfo.CodecProfileLevel.AACObjectHE;
private static final int OUTPUT_AUDIO_SAMPLE_RATE_HZ = 44100;
private int mWidth = 854;
private int mHeight = 480;
private String mOutputFile;
private String mInputFile;
@SuppressLint("LongLogTag")
public String changeResolution(File f, Runnable callBack) throws Throwable {
mInputFile = f.getAbsolutePath();
String filePath = mInputFile.substring(0, mInputFile.lastIndexOf(File.separator));
String[] splitByDot = mInputFile.split("\\.");
String ext = "";
if (splitByDot.length > 1) {
ext = splitByDot[splitByDot.length-1];
}
String fileName = mInputFile.substring(mInputFile.lastIndexOf(File.separator)+1);
if (ext.length() > 0) {
fileName = fileName.replace("."+ext, System.currentTimeMillis() + "_out.mp4");
} else {
fileName = fileName.concat(System.currentTimeMillis() + "_out.mp4");
}
final File outFile = new File(filePath, fileName);
if (!outFile.exists()) {
outFile.createNewFile();
}
mOutputFile = outFile.getAbsolutePath();
ChangerWrapper.changeResolutionInSeparatedThread(this, callBack);
return mOutputFile;
}
private static class ChangerWrapper implements Runnable {
private Throwable mThrowable;
private VideoEncoder mChanger;
private Runnable mCallBack;
private ChangerWrapper(VideoEncoder changer, Runnable callBack) {
mChanger = changer;
mCallBack = callBack;
}
@Override
public void run() {
try {
mChanger.prepareAndChangeResolution(mCallBack);
} catch (Throwable th) {
mThrowable = th;
}
}
public static void changeResolutionInSeparatedThread(VideoEncoder encoder, Runnable callBack) throws Throwable {
ChangerWrapper wrapper = new ChangerWrapper(encoder, callBack);
Thread th = new Thread(wrapper, ChangerWrapper.class.getSimpleName());
th.start();
// th.join();
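// NOTE: with join() left commented out, mThrowable below is read immediately
// after start(), before the worker has run, so encoding errors thrown on the
// worker thread are effectively swallowed rather than rethrown to the caller.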
if (wrapper.mThrowable != null) {
throw wrapper.mThrowable;
}
}
}
private void prepareAndChangeResolution(Runnable callBack) throws Exception {
Exception exception = null;
MediaCodecInfo videoCodecInfo = selectCodec(OUTPUT_VIDEO_MIME_TYPE);
if (videoCodecInfo == null) {
return;
}
MediaCodecInfo audioCodecInfo = selectCodec(OUTPUT_AUDIO_MIME_TYPE);
if (audioCodecInfo == null) {
return;
}
MediaExtractor videoExtractor = null;
MediaExtractor audioExtractor = null;
OutputSurface outputSurface = null;
MediaCodec videoDecoder = null;
MediaCodec audioDecoder = null;
MediaCodec videoEncoder = null;
MediaCodec audioEncoder = null;
MediaMuxer muxer = null;
InputSurface inputSurface = null;
try {
videoExtractor = createExtractor();
int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
MediaFormat inputFormat = videoExtractor.getTrackFormat(videoInputTrack);
MediaMetadataRetriever m = new MediaMetadataRetriever();
m.setDataSource(mInputFile);
int inputWidth, inputHeight;
try {
inputWidth = Integer.parseInt(m.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
inputHeight = Integer.parseInt(m.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
} catch (Exception e) {
Bitmap thumbnail = m.getFrameAtTime();
inputWidth = thumbnail.getWidth();
inputHeight = thumbnail.getHeight();
thumbnail.recycle();
}
if (inputWidth > inputHeight){
if (mWidth < mHeight) {
int w = mWidth;
mWidth = mHeight;
mHeight = w;
}
} else {
if (mWidth > mHeight) {
int w = mWidth;
mWidth = mHeight;
mHeight = w;
}
}
MediaFormat outputVideoFormat =
MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, mWidth, mHeight);
outputVideoFormat.setInteger(
MediaFormat.KEY_COLOR_FORMAT, OUTPUT_VIDEO_COLOR_FORMAT);
outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE);
outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
outputVideoFormat.setInteger(
MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
AtomicReference<Surface> inputSurfaceReference = new AtomicReference<Surface>();
videoEncoder = createVideoEncoder(
videoCodecInfo, outputVideoFormat, inputSurfaceReference);
inputSurface = new InputSurface(inputSurfaceReference.get());
inputSurface.makeCurrent();
outputSurface = new OutputSurface();
videoDecoder = createVideoDecoder(inputFormat, outputSurface.getSurface());
audioExtractor = createExtractor();
int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor);
MediaFormat inputAudioFormat = audioExtractor.getTrackFormat(audioInputTrack);
MediaFormat outputAudioFormat =
MediaFormat.createAudioFormat(inputAudioFormat.getString(MediaFormat.KEY_MIME),
inputAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
inputAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_AUDIO_BIT_RATE);
outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
audioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat);
audioDecoder = createAudioDecoder(inputAudioFormat);
muxer = new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
changeResolution(videoExtractor, audioExtractor,
videoDecoder, videoEncoder,
audioDecoder, audioEncoder,
muxer, inputSurface, outputSurface);
} finally {
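// Tear down every pipeline component; only the first cleanup failure is
// kept so it can be rethrown once everything has been released.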
try {
if (videoExtractor != null) {
videoExtractor.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (audioExtractor != null) {
audioExtractor.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (videoDecoder != null) {
videoDecoder.stop();
videoDecoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (outputSurface != null) {
outputSurface.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (videoEncoder != null) {
videoEncoder.stop();
videoEncoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (audioDecoder != null) {
audioDecoder.stop();
audioDecoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (muxer != null) {
muxer.stop();
muxer.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (inputSurface != null) {
inputSurface.release();
}
} catch(Exception e) {
if (exception == null) {
exception = e;
}
}
// Note: the completion callback runs inside finally, so it fires even
// when the transcode or the cleanup above failed; any captured cleanup
// exception is rethrown after it returns.
callBack.run();
}
if (exception != null) {
throw exception;
}
}
private MediaExtractor createExtractor() throws IOException {
MediaExtractor extractor;
extractor = new MediaExtractor();
extractor.setDataSource(mInputFile);
return extractor;
}
private MediaCodec createVideoDecoder(MediaFormat inputFormat, Surface surface) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, surface, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createVideoEncoder(MediaCodecInfo codecInfo, MediaFormat format,
AtomicReference<Surface> surfaceReference) throws IOException {
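// createInputSurface() is only valid between configure() and start();
// the surface is handed back to the caller through the AtomicReference.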
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
surfaceReference.set(encoder.createInputSurface());
encoder.start();
return encoder;
}
private MediaCodec createAudioDecoder(MediaFormat inputFormat) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, null, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createAudioEncoder(MediaCodecInfo codecInfo, MediaFormat format) throws IOException {
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
return encoder;
}
private int getAndSelectVideoTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (isVideoFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (isAudioFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private void changeResolution(MediaExtractor videoExtractor, MediaExtractor audioExtractor,
MediaCodec videoDecoder, MediaCodec videoEncoder,
MediaCodec audioDecoder, MediaCodec audioEncoder,
MediaMuxer muxer,
InputSurface inputSurface, OutputSurface outputSurface) {
ByteBuffer[] videoDecoderInputBuffers;
ByteBuffer[] videoDecoderOutputBuffers;
ByteBuffer[] videoEncoderOutputBuffers;
MediaCodec.BufferInfo videoDecoderOutputBufferInfo;
MediaCodec.BufferInfo videoEncoderOutputBufferInfo;
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
videoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
videoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] audioDecoderInputBuffers;
ByteBuffer[] audioDecoderOutputBuffers;
ByteBuffer[] audioEncoderInputBuffers;
ByteBuffer[] audioEncoderOutputBuffers;
MediaCodec.BufferInfo audioDecoderOutputBufferInfo;
MediaCodec.BufferInfo audioEncoderOutputBufferInfo;
audioDecoderInputBuffers = audioDecoder.getInputBuffers();
audioDecoderOutputBuffers = audioDecoder.getOutputBuffers();
audioEncoderInputBuffers = audioEncoder.getInputBuffers();
audioEncoderOutputBuffers = audioEncoder.getOutputBuffers();
audioDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
audioEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
MediaFormat decoderOutputVideoFormat = null;
MediaFormat decoderOutputAudioFormat = null;
MediaFormat encoderOutputVideoFormat = null;
MediaFormat encoderOutputAudioFormat = null;
int outputVideoTrack = -1;
int outputAudioTrack = -1;
boolean videoExtractorDone = false;
boolean videoDecoderDone = false;
boolean videoEncoderDone = false;
boolean audioExtractorDone = false;
boolean audioDecoderDone = false;
boolean audioEncoderDone = false;
int pendingAudioDecoderOutputBufferIndex = -1;
boolean muxing = false;
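// Main transcode loop. Each pass pumps every stage at most one buffer:
// feed both extractors, drain both decoders, copy decoded audio into the
// audio encoder, drain both encoders into the muxer, and start the muxer
// once both output formats are known. Every inner while ends in break,
// so no single stage can starve the others.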
while ((!videoEncoderDone) || (!audioEncoderDone)) {
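// Feed the next video sample from the extractor into the video decoder.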
while (!videoExtractorDone
&& (encoderOutputVideoFormat == null || muxing)) {
int decoderInputBufferIndex = videoDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
ByteBuffer decoderInputBuffer = videoDecoderInputBuffers[decoderInputBufferIndex];
int size = videoExtractor.readSampleData(decoderInputBuffer, 0);
long presentationTime = videoExtractor.getSampleTime();
if (size >= 0) {
videoDecoder.queueInputBuffer(
decoderInputBufferIndex,
0,
size,
presentationTime,
videoExtractor.getSampleFlags());
}
videoExtractorDone = !videoExtractor.advance();
if (videoExtractorDone) {
videoDecoder.queueInputBuffer(decoderInputBufferIndex,
0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
break;
}
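// Feed the next audio sample from the extractor into the audio decoder.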
while (!audioExtractorDone
&& (encoderOutputAudioFormat == null || muxing)) {
int decoderInputBufferIndex = audioDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
ByteBuffer decoderInputBuffer = audioDecoderInputBuffers[decoderInputBufferIndex];
int size = audioExtractor.readSampleData(decoderInputBuffer, 0);
long presentationTime = audioExtractor.getSampleTime();
if (size >= 0) {
audioDecoder.queueInputBuffer(decoderInputBufferIndex, 0, size,
presentationTime, audioExtractor.getSampleFlags());
}
audioExtractorDone = !audioExtractor.advance();
if (audioExtractorDone) {
audioDecoder.queueInputBuffer(decoderInputBufferIndex, 0, 0,
0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
break;
}
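// Drain one decoded video frame; rendering it pushes the frame through
// the GL surfaces into the video encoder.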
while (!videoDecoderDone
&& (encoderOutputVideoFormat == null || muxing)) {
int decoderOutputBufferIndex =
videoDecoder.dequeueOutputBuffer(
videoDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputVideoFormat = videoDecoder.getOutputFormat();
break;
}
ByteBuffer decoderOutputBuffer =
videoDecoderOutputBuffers[decoderOutputBufferIndex];
if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
boolean render = videoDecoderOutputBufferInfo.size != 0;
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render);
if (render) {
outputSurface.awaitNewImage();
outputSurface.drawImage();
inputSurface.setPresentationTime(
videoDecoderOutputBufferInfo.presentationTimeUs * 1000);
inputSurface.swapBuffers();
}
if ((videoDecoderOutputBufferInfo.flags
& MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
videoDecoderDone = true;
videoEncoder.signalEndOfInputStream();
}
break;
}
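// Drain one decoded audio buffer; it is parked as "pending" until the
// audio encoder can accept it.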
while (!audioDecoderDone && pendingAudioDecoderOutputBufferIndex == -1
&& (encoderOutputAudioFormat == null || muxing)) {
int decoderOutputBufferIndex =
audioDecoder.dequeueOutputBuffer(
audioDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
audioDecoderOutputBuffers = audioDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputAudioFormat = audioDecoder.getOutputFormat();
break;
}
ByteBuffer decoderOutputBuffer =
audioDecoderOutputBuffers[decoderOutputBufferIndex];
if ((audioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
audioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
pendingAudioDecoderOutputBufferIndex = decoderOutputBufferIndex;
break;
}
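// Copy the pending decoded audio buffer into the audio encoder.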
while (pendingAudioDecoderOutputBufferIndex != -1) {
int encoderInputBufferIndex = audioEncoder.dequeueInputBuffer(TIMEOUT_USEC);
if (encoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// No encoder input buffer is available yet; indexing the buffer array
// with INFO_TRY_AGAIN_LATER (-1) would crash, so retry on the next pass.
break;
}
ByteBuffer encoderInputBuffer = audioEncoderInputBuffers[encoderInputBufferIndex];
int size = audioDecoderOutputBufferInfo.size;
long presentationTime = audioDecoderOutputBufferInfo.presentationTimeUs;
if (size >= 0) {
ByteBuffer decoderOutputBuffer =
audioDecoderOutputBuffers[pendingAudioDecoderOutputBufferIndex]
.duplicate();
decoderOutputBuffer.position(audioDecoderOutputBufferInfo.offset);
decoderOutputBuffer.limit(audioDecoderOutputBufferInfo.offset + size);
encoderInputBuffer.position(0);
encoderInputBuffer.put(decoderOutputBuffer);
audioEncoder.queueInputBuffer(
encoderInputBufferIndex,
0,
size,
presentationTime,
audioDecoderOutputBufferInfo.flags);
}
audioDecoder.releaseOutputBuffer(pendingAudioDecoderOutputBufferIndex, false);
pendingAudioDecoderOutputBufferIndex = -1;
if ((audioDecoderOutputBufferInfo.flags
& MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
audioDecoderDone = true;
}
break;
}
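// Drain one encoded video buffer and write it to the muxer.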
while (!videoEncoderDone
&& (encoderOutputVideoFormat == null || muxing)) {
int encoderOutputBufferIndex = videoEncoder.dequeueOutputBuffer(
videoEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
encoderOutputVideoFormat = videoEncoder.getOutputFormat();
break;
}
ByteBuffer encoderOutputBuffer =
videoEncoderOutputBuffers[encoderOutputBufferIndex];
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (videoEncoderOutputBufferInfo.size != 0) {
muxer.writeSampleData(
outputVideoTrack, encoderOutputBuffer, videoEncoderOutputBufferInfo);
}
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
videoEncoderDone = true;
}
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
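// Drain one encoded audio buffer and write it to the muxer.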
while (!audioEncoderDone
&& (encoderOutputAudioFormat == null || muxing)) {
int encoderOutputBufferIndex = audioEncoder.dequeueOutputBuffer(
audioEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
audioEncoderOutputBuffers = audioEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
encoderOutputAudioFormat = audioEncoder.getOutputFormat();
break;
}
ByteBuffer encoderOutputBuffer =
audioEncoderOutputBuffers[encoderOutputBufferIndex];
if ((audioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
audioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (audioEncoderOutputBufferInfo.size != 0) {
muxer.writeSampleData(
outputAudioTrack, encoderOutputBuffer, audioEncoderOutputBufferInfo);
}
if ((audioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
audioEncoderDone = true;
}
audioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
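// Both output formats are known: register the tracks and start muxing.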
if (!muxing && (encoderOutputAudioFormat != null)
&& (encoderOutputVideoFormat != null)) {
outputVideoTrack = muxer.addTrack(encoderOutputVideoFormat);
outputAudioTrack = muxer.addTrack(encoderOutputAudioFormat);
muxer.start();
muxing = true;
}
}
}
private static boolean isVideoFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("video/");
}
private static boolean isAudioFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("audio/");
}
private static String getMimeTypeFor(MediaFormat format) {
return format.getString(MediaFormat.KEY_MIME);
}
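/**
 * Returns the first encoder that supports the given MIME type, or null
 * when the device provides none.
 */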
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
}
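For orientation, a minimal sketch of how calling code might drive this class. Only the method bodies above are visible in this diff, so the constructor arguments and the name of the public entry point (changeResolution) are assumptions, not the committed API; the only grounded behaviour is that the entry point returns the output path immediately and the Runnable fires when the worker thread finishes.
// Hypothetical usage sketch -- constructor and entry-point names assumed,
// exception handling omitted.
VideoEncoder encoder = new VideoEncoder(inputFilePath, 1280, 720); // assumed constructor
String outputPath = encoder.changeResolution(new Runnable() {      // assumed entry point
    @Override
    public void run() {
        // Runs on the ChangerWrapper worker thread once the muxer has been
        // stopped and all codecs and surfaces released.
        android.util.Log.i("VideoEncoder", "transcode finished");
    }
});
// outputPath points at the "<timestamp>_out.mp4" file created up front; the
// file is only fully written once the callback has fired.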
......@@ -3,7 +3,11 @@ package jp.agentec.abook.abv.ui.home.view;
import jp.agentec.abook.abv.launcher.android.R;
import android.content.Context;
import android.graphics.Color;
import android.view.Gravity;
import android.widget.Button;
import android.widget.TextView;
/**
 * Button for the breadcrumb history
......@@ -16,10 +20,12 @@ public class BreadCrumbButton extends Button {
super(context);
}
public BreadCrumbButton(Context context, String name) {
super(context);
super(context, null, R.style.ButtonABookDark);
this.name = name;
setText(name);
//Append a trailing space to the label to fine-tune the on-screen layout.
setText(name + " ");
setTextColor(Color.BLACK);
setGravity(Gravity.CENTER_VERTICAL);
setBackgroundResource(R.drawable.btn_breadcrumbs);
setPadding(10, 0, 40, 0);
}
......
......@@ -76,6 +76,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.concurrent.ConcurrentHashMap;
import jp.agentec.abook.abv.bl.acms.client.json.DownloadedContentInfoJSON;
......@@ -118,6 +119,7 @@ import jp.agentec.abook.abv.cl.util.ContentLogUtil;
import jp.agentec.abook.abv.cl.util.PreferenceUtil;
import jp.agentec.abook.abv.launcher.android.R;
import jp.agentec.abook.abv.launcher.android.R.id;
import jp.agentec.abook.abv.ui.common.activity.ABVAuthenticatedActivity;
import jp.agentec.abook.abv.ui.common.activity.ABVContentViewActivity;
import jp.agentec.abook.abv.ui.common.appinfo.AppColor;
import jp.agentec.abook.abv.ui.common.appinfo.AppDefType;
......@@ -135,6 +137,7 @@ import jp.agentec.abook.abv.ui.common.util.PatternStringUtil;
import jp.agentec.abook.abv.ui.common.view.ABVEditText;
import jp.agentec.abook.abv.ui.common.view.ABVPopupListWindow;
import jp.agentec.abook.abv.ui.common.vo.Size;
import jp.agentec.abook.abv.ui.home.activity.ChatWebViewActivity;
import jp.agentec.abook.abv.ui.home.helper.ABookCheckWebViewHelper;
import jp.agentec.abook.abv.ui.home.helper.ActivityHandlingHelper;
import jp.agentec.abook.abv.ui.Interface.MovePageInterface;
......@@ -332,6 +335,8 @@ public class ContentViewActivity extends ABVContentViewActivity {
public boolean mShowPinFlg = true;
// 移動・タップモードのフラグ
public boolean mMoveTaskFlg = false;
/**
 * Scroll status
......@@ -347,6 +352,7 @@ public class ContentViewActivity extends ABVContentViewActivity {
Logger.i(TAG, "[onCreate]:contentId=" + getContentId());
super.onCreate(savedInstanceState);
// Full-screen display
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
......@@ -941,6 +947,13 @@ public class ContentViewActivity extends ABVContentViewActivity {
showMeetingExitDialog();
}
});
if (isCollaboration) {
pauseBtn.setVisibility(View.GONE);
exitMeetingBtn.setVisibility(View.GONE);
exitBtn.setVisibility(View.GONE);
promoteBtn.setVisibility(View.GONE);
}
}
private boolean addPageViewFinish;
......@@ -1390,6 +1403,15 @@ public class ContentViewActivity extends ABVContentViewActivity {
mBtnRemoteStart.setVisibility(View.GONE);
}
}
if (isCollaboration) {
mHistoryBtn.setVisibility(View.GONE);
indexBtn.setVisibility(View.GONE);
search.setVisibility(View.GONE);
marking.setVisibility(View.GONE);
bookmark.setVisibility(View.GONE);
subMenuBtn.setVisibility(View.GONE);
}
}
private void setMarkingViewVisibility(boolean visible){
......@@ -1823,6 +1845,7 @@ public class ContentViewActivity extends ABVContentViewActivity {
exitMeetingBtn.setVisibility(View.VISIBLE);
}
}
mToolBar.setVisibility(visability);
}
......@@ -5363,7 +5386,7 @@ public class ContentViewActivity extends ABVContentViewActivity {
zoomRelativeLayout.removeView(view);
}
}
List<OperationTaskDto> tasks = mTaskHotspotJSON.getPageTasks(i);
List<OperationTaskDto> tasks = mTaskHotspotJSON.getPageTasks(key);
operationTaskLayout.addAllOperationTaskIcon(zoomRelativeLayout, tasks);
}
}
......
......@@ -60,12 +60,12 @@ public class ParentWebViewActivity extends ABVContentViewActivity {
protected ImageButton promoteBtn;
protected ImageButton pauseBtn;
protected ImageButton mBtnRemoteStart;
protected Button historyListBtn;
protected ImageButton historyListBtn;
protected ImageButton btnWebClose;
protected Button closeButton;
protected Button btnLinkOriginalBack;
protected Button btnWebBack;
protected Button btnWebForward;
protected ImageButton closeButton;
protected ImageButton btnLinkOriginalBack;
protected ImageButton btnWebBack;
protected ImageButton btnWebForward;
private ProgressBar m_progress;
protected ContentDto mContentDto;
......@@ -77,18 +77,18 @@ public class ParentWebViewActivity extends ABVContentViewActivity {
taskListButton = (ImageButton) findViewById(R.id.btn_show_task_list);
// helpButton = (ImageButton) findViewById(R.id.btn_help);
btnWebClose = (ImageButton) findViewById(R.id.btnWebClose);
closeButton = (Button) findViewById(R.id.closeBtn);
historyListBtn = (Button) findViewById(R.id.btn_history_list);
closeButton = (ImageButton) findViewById(R.id.closeBtn);
historyListBtn = (ImageButton) findViewById(R.id.btn_history_list);
subMenuBtn = (ImageButton) findViewById(R.id.btn_sub_menu);
exitMeetingBtn = (ImageButton) findViewById(R.id.btn_exitMeeting);
mBtnRemoteStart = (ImageButton) findViewById(R.id.btn_remote_start);
pauseBtn = (ImageButton) findViewById(R.id.btn_remote_pause);
promoteBtn = (ImageButton) findViewById(R.id.btn_promote);
btnWebBack = (Button) findViewById(R.id.btnWebBack);
btnWebForward = (Button) findViewById(R.id.btnWebForward);
btnWebBack = (ImageButton) findViewById(R.id.btnWebBack);
btnWebForward = (ImageButton) findViewById(R.id.btnWebForward);
m_progress = (ProgressBar) findViewById(R.id.refresh_prog);
btnLinkOriginalBack = (Button) findViewById(R.id.btn_link_original_back);
btnLinkOriginalBack = (ImageButton) findViewById(R.id.btn_link_original_back);
if (!isLinkedContent) {
final Context context = this;
......
......@@ -18,6 +18,7 @@ import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
......@@ -61,7 +62,7 @@ public class MemoManager {
memoDialog.setContentView(contentView);
final EditText memoText = (EditText) contentView.findViewById(R.id.memoText);
final Button btnDelete = (Button) contentView.findViewById(R.id.memo_delete);
final ImageButton btnDelete = (ImageButton) contentView.findViewById(R.id.memo_delete);
final Button btnSave = (Button) contentView.findViewById(R.id.memo_save);
final Button btnCancel = (Button) contentView.findViewById(R.id.memo_cancel);
final ImageView abookmark = (ImageView) contentView.findViewById(R.id.abookmark);
......