2017-03-31 01:58:05 +02:00
|
|
|
/*
|
2019-01-23 18:03:33 +01:00
|
|
|
* This is the source code of Telegram for Android v. 5.x.x.
|
2017-03-31 01:58:05 +02:00
|
|
|
* It is licensed under GNU GPL v. 2 or later.
|
|
|
|
* You should have received a copy of the license in this archive (see LICENSE).
|
|
|
|
*
|
2019-01-23 18:03:33 +01:00
|
|
|
* Copyright Nikolai Kudashov, 2013-2018.
|
2017-03-31 01:58:05 +02:00
|
|
|
*/
|
|
|
|
|
|
|
|
package org.telegram.ui.Components;
|
|
|
|
|
|
|
|
import android.animation.Animator;
|
|
|
|
import android.animation.AnimatorListenerAdapter;
|
|
|
|
import android.animation.AnimatorSet;
|
|
|
|
import android.animation.ObjectAnimator;
|
|
|
|
import android.annotation.TargetApi;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.app.Activity;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.content.Context;
|
2019-01-23 18:03:33 +01:00
|
|
|
import android.graphics.Bitmap;
|
|
|
|
import android.graphics.BitmapFactory;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.graphics.Canvas;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.graphics.ImageFormat;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.graphics.Outline;
|
|
|
|
import android.graphics.Paint;
|
|
|
|
import android.graphics.Path;
|
|
|
|
import android.graphics.PorterDuff;
|
|
|
|
import android.graphics.PorterDuffXfermode;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.graphics.RectF;
|
|
|
|
import android.graphics.SurfaceTexture;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.graphics.drawable.ColorDrawable;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.media.AudioFormat;
|
|
|
|
import android.media.AudioRecord;
|
|
|
|
import android.media.MediaCodec;
|
|
|
|
import android.media.MediaCodecInfo;
|
|
|
|
import android.media.MediaFormat;
|
|
|
|
import android.media.MediaRecorder;
|
|
|
|
import android.net.Uri;
|
|
|
|
import android.opengl.EGL14;
|
|
|
|
import android.opengl.EGLExt;
|
|
|
|
import android.opengl.GLES11Ext;
|
|
|
|
import android.opengl.GLES20;
|
|
|
|
import android.opengl.GLUtils;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.os.Build;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.os.Handler;
|
|
|
|
import android.os.Looper;
|
|
|
|
import android.os.Message;
|
2019-05-14 14:08:05 +02:00
|
|
|
import androidx.annotation.Keep;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.view.Gravity;
|
2018-07-30 04:07:02 +02:00
|
|
|
import android.view.HapticFeedbackConstants;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.view.MotionEvent;
|
|
|
|
import android.view.Surface;
|
|
|
|
import android.view.TextureView;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.view.View;
|
|
|
|
import android.view.ViewOutlineProvider;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.view.WindowManager;
|
2017-03-31 01:58:05 +02:00
|
|
|
import android.view.animation.DecelerateInterpolator;
|
|
|
|
import android.widget.FrameLayout;
|
2017-07-08 18:32:04 +02:00
|
|
|
import android.widget.ImageView;
|
2017-03-31 01:58:05 +02:00
|
|
|
|
2018-08-27 10:33:11 +02:00
|
|
|
import com.google.android.exoplayer2.ExoPlayer;
|
|
|
|
|
2017-03-31 01:58:05 +02:00
|
|
|
import org.telegram.messenger.AndroidUtilities;
|
2019-01-23 18:03:33 +01:00
|
|
|
import org.telegram.messenger.ApplicationLoader;
|
2018-07-30 04:07:02 +02:00
|
|
|
import org.telegram.messenger.BuildVars;
|
2017-07-08 18:32:04 +02:00
|
|
|
import org.telegram.messenger.DispatchQueue;
|
|
|
|
import org.telegram.messenger.FileLoader;
|
2017-03-31 01:58:05 +02:00
|
|
|
import org.telegram.messenger.FileLog;
|
2019-05-14 14:08:05 +02:00
|
|
|
import org.telegram.messenger.LocaleController;
|
2017-03-31 01:58:05 +02:00
|
|
|
import org.telegram.messenger.MediaController;
|
|
|
|
import org.telegram.messenger.NotificationCenter;
|
2017-07-08 18:32:04 +02:00
|
|
|
import org.telegram.messenger.R;
|
2018-07-30 04:07:02 +02:00
|
|
|
import org.telegram.messenger.SharedConfig;
|
2017-07-08 18:32:04 +02:00
|
|
|
import org.telegram.messenger.UserConfig;
|
2019-01-23 18:03:33 +01:00
|
|
|
import org.telegram.messenger.Utilities;
|
2017-03-31 01:58:05 +02:00
|
|
|
import org.telegram.messenger.VideoEditedInfo;
|
|
|
|
import org.telegram.messenger.camera.CameraController;
|
2017-07-08 18:32:04 +02:00
|
|
|
import org.telegram.messenger.camera.CameraInfo;
|
|
|
|
import org.telegram.messenger.camera.CameraSession;
|
|
|
|
import org.telegram.messenger.camera.Size;
|
|
|
|
import org.telegram.messenger.video.MP4Builder;
|
|
|
|
import org.telegram.messenger.video.Mp4Movie;
|
|
|
|
import org.telegram.tgnet.ConnectionsManager;
|
|
|
|
import org.telegram.tgnet.TLRPC;
|
|
|
|
import org.telegram.ui.ActionBar.Theme;
|
2017-03-31 01:58:05 +02:00
|
|
|
import org.telegram.ui.ChatActivity;
|
|
|
|
|
|
|
|
import java.io.File;
|
2019-01-23 18:03:33 +01:00
|
|
|
import java.io.FileOutputStream;
|
2017-07-08 18:32:04 +02:00
|
|
|
import java.lang.ref.WeakReference;
|
|
|
|
import java.nio.ByteBuffer;
|
|
|
|
import java.nio.ByteOrder;
|
|
|
|
import java.nio.FloatBuffer;
|
|
|
|
import java.util.ArrayList;
|
|
|
|
import java.util.Timer;
|
|
|
|
import java.util.TimerTask;
|
|
|
|
import java.util.concurrent.ArrayBlockingQueue;
|
2018-07-30 04:07:02 +02:00
|
|
|
import java.util.concurrent.CountDownLatch;
|
2017-03-31 01:58:05 +02:00
|
|
|
|
2017-07-08 18:32:04 +02:00
|
|
|
import javax.microedition.khronos.egl.EGL10;
|
|
|
|
import javax.microedition.khronos.egl.EGLConfig;
|
|
|
|
import javax.microedition.khronos.egl.EGLContext;
|
|
|
|
import javax.microedition.khronos.egl.EGLDisplay;
|
|
|
|
import javax.microedition.khronos.egl.EGLSurface;
|
|
|
|
import javax.microedition.khronos.opengles.GL;
|
|
|
|
|
|
|
|
@TargetApi(18)
|
|
|
|
public class InstantCameraView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate {
|
2017-03-31 01:58:05 +02:00
|
|
|
|
2018-07-30 04:07:02 +02:00
|
|
|
// Account under which notifications are posted and uploads are tracked.
private int currentAccount = UserConfig.selectedAccount;
// Circular container holding the camera TextureView.
private FrameLayout cameraContainer;
private ChatActivity baseFragment;
// Paint for the circular recording-progress ring drawn in onDraw().
private Paint paint;
private RectF rect;
private ImageView switchCameraButton;
private ImageView muteImageView;
// Recording progress, 0..1 (full ring == 60 seconds; see didReceivedNotification).
private float progress;
private CameraInfo selectedCamera;
private boolean isFrontface = true;
// Written by the camera thread, read on the UI thread — hence volatile.
private volatile boolean cameraReady;
private AnimatorSet muteAnimation;
// Upload results received via NotificationCenter.FileDidUpload.
private TLRPC.InputFile file;
private TLRPC.InputEncryptedFile encryptedFile;
// Encryption key/iv — only set when the upload was encrypted (secret chat).
private byte[] key;
private byte[] iv;
private long size;
private boolean isSecretChat;
private VideoEditedInfo videoEditedInfo;
private VideoPlayer videoPlayer;
// Blurred snapshot of the last preview frame, persisted as "icthumb.jpg"
// and shown as a placeholder while the camera starts (see showCamera()).
private Bitmap lastBitmap;

// Scratch buffer for getLocationOnScreen().
private int[] position = new int[2];
// GL texture ids for the current and (during a camera switch) previous camera.
private int cameraTexture[] = new int[1];
private int oldCameraTexture[] = new int[1];
private float cameraTextureAlpha = 1.0f;

private AnimatorSet animatorSet;

private boolean deviceHasGoodCamera;
private boolean requestingPermissions;
// Destination .mp4 in the media cache directory.
private File cameraFile;
private long recordStartTime;
private boolean recording;
private long recordedTime;
private boolean cancelled;
private long duration;

// Posts recordProgressChanged every 50ms while recording, updating `duration`.
private Runnable timerRunnable = new Runnable() {
    @Override
    public void run() {
        if (!recording) {
            return;
        }
        NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.recordProgressChanged, duration = System.currentTimeMillis() - recordStartTime, 0.0);
        AndroidUtilities.runOnUIThread(timerRunnable, 50);
    }
};

private CameraGLThread cameraThread;
private Size previewSize;
private Size pictureSize;
// Preferred aspect ratio for the preview, user-configurable.
private Size aspectRatio = SharedConfig.roundCamera16to9 ? new Size(16, 9) : new Size(4, 3);
private TextureView textureView;
// Shows the blurred last-frame placeholder over/behind the live preview.
private BackupImageView textureOverlayView;
private CameraSession cameraSession;

// Model-view-projection and surface-texture transform matrices for rendering.
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
// ST matrix of the previous camera, kept during a camera-switch crossfade.
private float[] moldSTMatrix = new float[16];

// Pass-through vertex shader applying the MVP and texture transforms.
private static final String VERTEX_SHADER =
        "uniform mat4 uMVPMatrix;\n" +
        "uniform mat4 uSTMatrix;\n" +
        "attribute vec4 aPosition;\n" +
        "attribute vec4 aTextureCoord;\n" +
        "varying vec2 vTextureCoord;\n" +
        "void main() {\n" +
        "   gl_Position = uMVPMatrix * aPosition;\n" +
        "   vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
        "}\n";

// Fragment shader used for the recorded output: samples the external camera
// texture and masks everything outside a circle (radius^2 == 0.2601) to white.
private static final String FRAGMENT_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision highp float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform float scaleX;\n" +
        "uniform float scaleY;\n" +
        "uniform float alpha;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "void main() {\n" +
        "   vec2 coord = vec2((vTextureCoord.x - 0.5) * scaleX, (vTextureCoord.y - 0.5) * scaleY);\n" +
        "   float coef = ceil(clamp(0.2601 - dot(coord, coord), 0.0, 1.0));\n" +
        "   vec3 color = texture2D(sTexture, vTextureCoord).rgb * coef + (1.0 - step(0.001, coef));\n" +
        "   gl_FragColor = vec4(color * alpha, alpha);\n" +
        "}\n";

// Plain fragment shader for the on-screen preview (no circular mask).
private static final String FRAGMENT_SCREEN_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision lowp float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "void main() {\n" +
        "   gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
        "}\n";

private FloatBuffer vertexBuffer;
private FloatBuffer textureBuffer;
// Scale factors passed to FRAGMENT_SHADER to compensate preview aspect ratio.
private float scaleX;
private float scaleY;
|
|
|
|
|
|
|
|
/**
 * Builds the round-video camera overlay: the circular preview container,
 * camera-switch and mute buttons, and the blurred-placeholder overlay.
 * The view starts INVISIBLE; showCamera() makes it visible.
 */
public InstantCameraView(Context context, ChatActivity parentFragment) {
    super(context);
    // Tapping the view either toggles playback mute (preview mode) or asks
    // the fragment to check the record-lock state (recording mode).
    setOnTouchListener((v, event) -> {
        if (event.getAction() == MotionEvent.ACTION_DOWN && baseFragment != null) {
            if (videoPlayer != null) {
                boolean mute = !videoPlayer.isMuted();
                videoPlayer.setMute(mute);
                if (muteAnimation != null) {
                    muteAnimation.cancel();
                }
                // Fade/scale the mute icon in when muted, out when unmuted.
                muteAnimation = new AnimatorSet();
                muteAnimation.playTogether(
                        ObjectAnimator.ofFloat(muteImageView, "alpha", mute ? 1.0f : 0.0f),
                        ObjectAnimator.ofFloat(muteImageView, "scaleX", mute ? 1.0f : 0.5f),
                        ObjectAnimator.ofFloat(muteImageView, "scaleY", mute ? 1.0f : 0.5f));
                muteAnimation.addListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        if (animation.equals(muteAnimation)) {
                            muteAnimation = null;
                        }
                    }
                });
                muteAnimation.setDuration(180);
                muteAnimation.setInterpolator(new DecelerateInterpolator());
                muteAnimation.start();
            } else {
                baseFragment.checkRecordLocked();
            }
        }
        // Consume all touches so they don't reach views underneath.
        return true;
    });
    setWillNotDraw(false);
    // Semi-transparent black dim behind the circular preview.
    setBackgroundColor(0xc0000000);
    baseFragment = parentFragment;
    isSecretChat = baseFragment.getCurrentEncryptedChat() != null;
    // Progress-ring paint; overridden setAlpha triggers a redraw so the ring
    // can be animated via ObjectAnimator.ofInt(paint, "alpha", ...).
    paint = new Paint(Paint.ANTI_ALIAS_FLAG) {
        @Override
        public void setAlpha(int a) {
            super.setAlpha(a);
            invalidate();
        }
    };
    paint.setStyle(Paint.Style.STROKE);
    paint.setStrokeCap(Paint.Cap.ROUND);
    paint.setStrokeWidth(AndroidUtilities.dp(3));
    paint.setColor(0xffffffff);

    rect = new RectF();

    if (Build.VERSION.SDK_INT >= 21) {
        // API 21+: clip the preview to a circle via an oval outline provider.
        cameraContainer = new FrameLayout(context) {
            @Override
            public void setScaleX(float scaleX) {
                super.setScaleX(scaleX);
                // Keep the parent's shadow/ring in sync with the scale animation.
                InstantCameraView.this.invalidate();
            }

            @Override
            public void setAlpha(float alpha) {
                super.setAlpha(alpha);
                InstantCameraView.this.invalidate();
            }
        };
        cameraContainer.setOutlineProvider(new ViewOutlineProvider() {
            @TargetApi(Build.VERSION_CODES.LOLLIPOP)
            @Override
            public void getOutline(View view, Outline outline) {
                outline.setOval(0, 0, AndroidUtilities.roundMessageSize, AndroidUtilities.roundMessageSize);
            }
        });
        cameraContainer.setClipToOutline(true);
        cameraContainer.setWillNotDraw(false);
    } else {
        // Pre-21 fallback: punch out everything outside an inverse circle path
        // with a CLEAR xfermode on a hardware layer.
        final Path path = new Path();
        final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
        paint.setColor(0xff000000);
        paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
        cameraContainer = new FrameLayout(context) {
            @Override
            public void setScaleX(float scaleX) {
                super.setScaleX(scaleX);
                InstantCameraView.this.invalidate();
            }

            @Override
            protected void onSizeChanged(int w, int h, int oldw, int oldh) {
                super.onSizeChanged(w, h, oldw, oldh);
                // Rebuild the inverse circle mask whenever the size changes.
                path.reset();
                path.addCircle(w / 2, h / 2, w / 2, Path.Direction.CW);
                path.toggleInverseFillType();
            }

            @Override
            protected void dispatchDraw(Canvas canvas) {
                try {
                    super.dispatchDraw(canvas);
                    canvas.drawPath(path, paint);
                } catch (Exception ignore) {
                    // Best effort: a failed frame is preferable to a crash here.
                }
            }
        };
        cameraContainer.setWillNotDraw(false);
        cameraContainer.setLayerType(View.LAYER_TYPE_HARDWARE, null);
    }

    addView(cameraContainer, new FrameLayout.LayoutParams(AndroidUtilities.roundMessageSize, AndroidUtilities.roundMessageSize, Gravity.CENTER));

    switchCameraButton = new ImageView(context);
    switchCameraButton.setScaleType(ImageView.ScaleType.CENTER);
    switchCameraButton.setContentDescription(LocaleController.getString("AccDescrSwitchCamera", R.string.AccDescrSwitchCamera));
    addView(switchCameraButton, LayoutHelper.createFrame(48, 48, Gravity.LEFT | Gravity.BOTTOM, 20, 0, 0, 14));
    switchCameraButton.setOnClickListener(v -> {
        // Ignore taps until the camera pipeline is fully initialized.
        if (!cameraReady || cameraSession == null || !cameraSession.isInitied() || cameraThread == null) {
            return;
        }
        switchCamera();
        // Shrink the button, swap its icon for the new facing, grow it back.
        ObjectAnimator animator = ObjectAnimator.ofFloat(switchCameraButton, "scaleX", 0.0f).setDuration(100);
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animator) {
                switchCameraButton.setImageResource(isFrontface ? R.drawable.camera_revert1 : R.drawable.camera_revert2);
                ObjectAnimator.ofFloat(switchCameraButton, "scaleX", 1.0f).setDuration(100).start();
            }
        });
        animator.start();
    });

    muteImageView = new ImageView(context);
    muteImageView.setScaleType(ImageView.ScaleType.CENTER);
    muteImageView.setImageResource(R.drawable.video_mute);
    muteImageView.setAlpha(0.0f);
    addView(muteImageView, LayoutHelper.createFrame(48, 48, Gravity.CENTER));
    // Vertically center the icon on the round message circle.
    ((LayoutParams) muteImageView.getLayoutParams()).topMargin = AndroidUtilities.roundMessageSize / 2 - AndroidUtilities.dp(24);

    // Placeholder image shown while the live preview is not yet available.
    textureOverlayView = new BackupImageView(getContext());
    textureOverlayView.setRoundRadius(AndroidUtilities.roundMessageSize / 2);
    addView(textureOverlayView, new FrameLayout.LayoutParams(AndroidUtilities.roundMessageSize, AndroidUtilities.roundMessageSize, Gravity.CENTER));

    setVisibility(INVISIBLE);
}
|
|
|
|
|
2017-07-08 18:32:04 +02:00
|
|
|
@Override
|
|
|
|
public boolean onInterceptTouchEvent(MotionEvent ev) {
|
|
|
|
getParent().requestDisallowInterceptTouchEvent(true);
|
|
|
|
return super.onInterceptTouchEvent(ev);
|
|
|
|
}
|
|
|
|
|
|
|
|
@Override
|
|
|
|
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
|
|
|
|
super.onSizeChanged(w, h, oldw, oldh);
|
|
|
|
if (getVisibility() != VISIBLE) {
|
|
|
|
cameraContainer.setTranslationY(getMeasuredHeight() / 2);
|
2019-01-23 18:03:33 +01:00
|
|
|
textureOverlayView.setTranslationY(getMeasuredHeight() / 2);
|
2017-03-31 01:58:05 +02:00
|
|
|
}
|
2017-07-08 18:32:04 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@Override
protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    // Subscribe to recording progress (drives the circular timer ring) and
    // to completion of the recorded file's upload. Paired with the
    // removeObserver calls in onDetachedFromWindow.
    NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.recordProgressChanged);
    NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileDidUpload);
}
|
|
|
|
|
|
|
|
@Override
protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    // Unsubscribe from both notifications registered in onAttachedToWindow
    // to avoid leaking this view through the NotificationCenter.
    NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.recordProgressChanged);
    NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.FileDidUpload);
}
|
|
|
|
|
|
|
|
@Override
|
2018-07-30 04:07:02 +02:00
|
|
|
public void didReceivedNotification(int id, int account, Object... args) {
|
2017-07-08 18:32:04 +02:00
|
|
|
if (id == NotificationCenter.recordProgressChanged) {
|
|
|
|
long t = (Long) args[0];
|
|
|
|
progress = t / 60000.0f;
|
|
|
|
recordedTime = t;
|
|
|
|
invalidate();
|
|
|
|
} else if (id == NotificationCenter.FileDidUpload) {
|
|
|
|
final String location = (String) args[0];
|
|
|
|
if (cameraFile != null && cameraFile.getAbsolutePath().equals(location)) {
|
|
|
|
file = (TLRPC.InputFile) args[1];
|
|
|
|
encryptedFile = (TLRPC.InputEncryptedFile) args[2];
|
|
|
|
size = (Long) args[5];
|
|
|
|
if (encryptedFile != null) {
|
|
|
|
key = (byte[]) args[3];
|
|
|
|
iv = (byte[]) args[4];
|
2017-03-31 01:58:05 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2017-07-08 18:32:04 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
public void destroy(boolean async, final Runnable beforeDestroyRunnable) {
|
|
|
|
if (cameraSession != null) {
|
|
|
|
cameraSession.destroy();
|
2018-07-30 04:07:02 +02:00
|
|
|
CameraController.getInstance().close(cameraSession, !async ? new CountDownLatch(1) : null, beforeDestroyRunnable);
|
2017-07-08 18:32:04 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
@Override
protected void onDraw(Canvas canvas) {
    float x = cameraContainer.getX();
    float y = cameraContainer.getY();
    // The progress ring sits 8dp outside the circular preview bounds.
    rect.set(x - AndroidUtilities.dp(8), y - AndroidUtilities.dp(8), x + cameraContainer.getMeasuredWidth() + AndroidUtilities.dp(8), y + cameraContainer.getMeasuredHeight() + AndroidUtilities.dp(8));
    if (progress != 0) {
        // Arc starts at 12 o'clock (-90) and sweeps proportionally to progress (0..1).
        canvas.drawArc(rect, -90, 360 * progress, false, paint);
    }
    if (Theme.chat_roundVideoShadow != null) {
        // Draw the themed drop shadow under the round preview, scaled and
        // faded in lockstep with the preview container's own scale/alpha.
        int x1 = (int) x - AndroidUtilities.dp(3);
        int y1 = (int) y - AndroidUtilities.dp(2);
        canvas.save();
        canvas.scale(cameraContainer.getScaleX(), cameraContainer.getScaleY(), x1 + AndroidUtilities.roundMessageSize / 2 + AndroidUtilities.dp(3), y1 + AndroidUtilities.roundMessageSize / 2 + AndroidUtilities.dp(3));
        Theme.chat_roundVideoShadow.setAlpha((int) (cameraContainer.getAlpha() * 255));
        Theme.chat_roundVideoShadow.setBounds(x1, y1, x1 + AndroidUtilities.roundMessageSize + AndroidUtilities.dp(6), y1 + AndroidUtilities.roundMessageSize + AndroidUtilities.dp(6));
        Theme.chat_roundVideoShadow.draw(canvas);
        canvas.restore();
    }
}
|
|
|
|
|
|
|
|
@Override
public void setVisibility(int visibility) {
    super.setVisibility(visibility);
    // Reset every child to the collapsed "pre-animation" state so that
    // startAnimation(true) always starts from the same baseline.
    setAlpha(0.0f);
    switchCameraButton.setAlpha(0.0f);
    cameraContainer.setAlpha(0.0f);
    textureOverlayView.setAlpha(0.0f);
    muteImageView.setAlpha(0.0f);
    muteImageView.setScaleX(1.0f);
    muteImageView.setScaleY(1.0f);
    cameraContainer.setScaleX(0.1f);
    cameraContainer.setScaleY(0.1f);
    textureOverlayView.setScaleX(0.1f);
    textureOverlayView.setScaleY(0.1f);
    if (cameraContainer.getMeasuredWidth() != 0) {
        // Scale animations pivot around the center of the round preview.
        cameraContainer.setPivotX(cameraContainer.getMeasuredWidth() / 2);
        cameraContainer.setPivotY(cameraContainer.getMeasuredHeight() / 2);
        textureOverlayView.setPivotX(textureOverlayView.getMeasuredWidth() / 2);
        textureOverlayView.setPivotY(textureOverlayView.getMeasuredHeight() / 2);
    }
    try {
        // Keep the screen awake only while the camera overlay is visible.
        if (visibility == VISIBLE) {
            ((Activity) getContext()).getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        } else {
            ((Activity) getContext()).getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        }
    } catch (Exception e) {
        // getContext() may not be an Activity in every embedding; just log.
        FileLog.e(e);
    }
}
|
|
|
|
|
|
|
|
/**
 * Shows the round-video camera: resets recording state, displays the cached
 * blurred thumbnail as a placeholder, initializes the camera, creates the
 * destination .mp4 file and the TextureView whose surface spawns the
 * CameraGLThread, then plays the open animation. No-op if already shown.
 */
public void showCamera() {
    // textureView doubles as the "already shown" flag.
    if (textureView != null) {
        return;
    }

    switchCameraButton.setImageResource(R.drawable.camera_revert1);
    textureOverlayView.setAlpha(1.0f);
    if (lastBitmap == null) {
        // Try to restore the blurred last-frame thumbnail saved by
        // saveLastCameraBitmap() on a previous session.
        try {
            File file = new File(ApplicationLoader.getFilesDirFixed(), "icthumb.jpg");
            lastBitmap = BitmapFactory.decodeFile(file.getAbsolutePath());
        } catch (Throwable ignore) {
            // Missing/corrupt thumbnail is fine; fall back to the placeholder.
        }
    }
    if (lastBitmap != null) {
        textureOverlayView.setImageBitmap(lastBitmap);
    } else {
        textureOverlayView.setImageResource(R.drawable.icplaceholder);
    }
    // Reset all per-recording state.
    cameraReady = false;
    isFrontface = true;
    selectedCamera = null;
    recordedTime = 0;
    progress = 0;
    cancelled = false;
    file = null;
    encryptedFile = null;
    key = null;
    iv = null;

    if (!initCamera()) {
        return;
    }
    // Pause any currently playing media before recording.
    MediaController.getInstance().pauseMessage(MediaController.getInstance().getPlayingMessageObject());

    cameraFile = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), SharedConfig.getLastLocalId() + ".mp4");
    // Persist the incremented local id used for the file name.
    SharedConfig.saveConfig();

    if (BuildVars.LOGS_ENABLED) {
        FileLog.d("show round camera");
    }

    textureView = new TextureView(getContext());
    textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.d("camera surface available");
            }
            if (cameraThread == null && surface != null) {
                // User may have cancelled before the surface arrived.
                if (cancelled) {
                    return;
                }
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.d("start create thread");
                }
                cameraThread = new CameraGLThread(surface, width, height);
            }
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, final int width, final int height) {

        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            // Shut down the GL thread and close the camera when the surface goes away.
            if (cameraThread != null) {
                cameraThread.shutdown(0);
                cameraThread = null;
            }
            if (cameraSession != null) {
                CameraController.getInstance().close(cameraSession, null, null);
            }
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {

        }
    });
    cameraContainer.addView(textureView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT));

    setVisibility(VISIBLE);

    startAnimation(true);
}
|
|
|
|
|
|
|
|
/** Returns the circular container that hosts the camera TextureView. */
public FrameLayout getCameraContainer() {
    return cameraContainer;
}
|
|
|
|
|
|
|
|
/**
 * Plays the open/close animation for the whole overlay: fades and scales the
 * preview, overlay, buttons and progress ring, sliding the preview in from /
 * out to half a view height below center. When closing, hides the camera and
 * the view itself once the animation ends.
 *
 * @param open true to animate in, false to animate out
 */
public void startAnimation(boolean open) {
    if (animatorSet != null) {
        animatorSet.cancel();
    }
    // Temporarily hide any picture-in-picture round video while we are open.
    PipRoundVideoView pipRoundVideoView = PipRoundVideoView.getInstance();
    if (pipRoundVideoView != null) {
        pipRoundVideoView.showTemporary(!open);
    }
    animatorSet = new AnimatorSet();
    animatorSet.playTogether(
            ObjectAnimator.ofFloat(this, "alpha", open ? 1.0f : 0.0f),
            ObjectAnimator.ofFloat(switchCameraButton, "alpha", open ? 1.0f : 0.0f),
            // The mute icon always ends hidden; it is shown separately on tap.
            ObjectAnimator.ofFloat(muteImageView, "alpha", 0.0f),
            // Animating the custom Paint's alpha redraws the progress ring.
            ObjectAnimator.ofInt(paint, "alpha", open ? 255 : 0),
            ObjectAnimator.ofFloat(cameraContainer, "alpha", open ? 1.0f : 0.0f),
            ObjectAnimator.ofFloat(cameraContainer, "scaleX", open ? 1.0f : 0.1f),
            ObjectAnimator.ofFloat(cameraContainer, "scaleY", open ? 1.0f : 0.1f),
            ObjectAnimator.ofFloat(cameraContainer, "translationY", open ? getMeasuredHeight() / 2 : 0, open ? 0 : getMeasuredHeight() / 2),
            ObjectAnimator.ofFloat(textureOverlayView, "alpha", open ? 1.0f : 0.0f),
            ObjectAnimator.ofFloat(textureOverlayView, "scaleX", open ? 1.0f : 0.1f),
            ObjectAnimator.ofFloat(textureOverlayView, "scaleY", open ? 1.0f : 0.1f),
            ObjectAnimator.ofFloat(textureOverlayView, "translationY", open ? getMeasuredHeight() / 2 : 0, open ? 0 : getMeasuredHeight() / 2)
    );
    if (!open) {
        animatorSet.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                // Guard against a stale listener from a cancelled run.
                if (animation.equals(animatorSet)) {
                    hideCamera(true);
                    setVisibility(INVISIBLE);
                }
            }
        });
    }
    animatorSet.setDuration(180);
    animatorSet.setInterpolator(new DecelerateInterpolator());
    animatorSet.start();
}
|
|
|
|
|
|
|
|
/**
 * Returns the preview container's on-screen position and dimensions.
 * NOTE(review): the 3rd/4th arguments are width/height, not right/bottom —
 * this assumes the project-local Rect(x, y, w, h) type rather than
 * android.graphics.Rect; confirm against the Rect class used here.
 */
public Rect getCameraRect() {
    cameraContainer.getLocationOnScreen(position);
    return new Rect(position[0], position[1], cameraContainer.getWidth(), cameraContainer.getHeight());
}
|
|
|
|
|
2017-07-08 18:32:04 +02:00
|
|
|
public void changeVideoPreviewState(int state, float progress) {
|
|
|
|
if (videoPlayer == null) {
|
2017-03-31 01:58:05 +02:00
|
|
|
return;
|
|
|
|
}
|
2017-07-08 18:32:04 +02:00
|
|
|
if (state == 0) {
|
|
|
|
startProgressTimer();
|
|
|
|
videoPlayer.play();
|
|
|
|
} else if (state == 1) {
|
|
|
|
stopProgressTimer();
|
|
|
|
videoPlayer.pause();
|
|
|
|
} else if (state == 2) {
|
|
|
|
videoPlayer.seekTo((long) (progress * videoPlayer.getDuration()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Finalizes the recording. For state 4 the already-recorded file is sent
 * (re-encoding and re-uploading if the user trimmed it); for other states the
 * GL thread is shut down with a send code (0 = discard, 1 = send, 2 = show
 * preview), and recordings shorter than 800ms are treated as cancelled.
 */
public void send(int state) {
    if (textureView == null) {
        return;
    }
    stopProgressTimer();
    if (videoPlayer != null) {
        videoPlayer.releasePlayer(true);
        videoPlayer = null;
    }
    if (state == 4) {
        if (videoEditedInfo.needConvert()) {
            // The video was trimmed: drop the already-uploaded file info so it
            // gets re-converted and re-uploaded with the new bounds.
            file = null;
            encryptedFile = null;
            key = null;
            iv = null;
            double totalDuration = videoEditedInfo.estimatedDuration;
            long startTime = videoEditedInfo.startTime >= 0 ? videoEditedInfo.startTime : 0;
            long endTime = videoEditedInfo.endTime >= 0 ? videoEditedInfo.endTime : videoEditedInfo.estimatedDuration;
            videoEditedInfo.estimatedDuration = endTime - startTime;
            // Scale the size estimate by the trimmed fraction; never below 1.
            videoEditedInfo.estimatedSize = Math.max(1, (long) (size * (videoEditedInfo.estimatedDuration / totalDuration)));
            videoEditedInfo.bitrate = 400000;
            // Convert trim bounds from milliseconds to microseconds.
            if (videoEditedInfo.startTime > 0) {
                videoEditedInfo.startTime *= 1000;
            }
            if (videoEditedInfo.endTime > 0) {
                videoEditedInfo.endTime *= 1000;
            }
            FileLoader.getInstance(currentAccount).cancelUploadFile(cameraFile.getAbsolutePath(), false);
        } else {
            videoEditedInfo.estimatedSize = Math.max(1, size);
        }
        videoEditedInfo.file = file;
        videoEditedInfo.encryptedFile = encryptedFile;
        videoEditedInfo.key = key;
        videoEditedInfo.iv = iv;
        baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, cameraFile.getAbsolutePath(), 0, true), videoEditedInfo);
    } else {
        // Anything under 800ms counts as an accidental tap.
        cancelled = recordedTime < 800;
        recording = false;
        AndroidUtilities.cancelRunOnUIThread(timerRunnable);
        if (cameraThread != null) {
            NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.recordStopped, !cancelled && state == 3 ? 2 : 0);
            // Map the UI state to the GL thread's send code.
            int send;
            if (cancelled) {
                send = 0;
            } else if (state == 3) {
                send = 2;
            } else {
                send = 1;
            }
            // Cache the blurred last frame before tearing the preview down.
            saveLastCameraBitmap();
            cameraThread.shutdown(send);
            cameraThread = null;
        }
        if (cancelled) {
            NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.audioRecordTooShort, true);
            startAnimation(false);
        }
    }
}
|
|
|
|
|
2019-01-23 18:03:33 +01:00
|
|
|
private void saveLastCameraBitmap() {
|
|
|
|
Bitmap bitmap = textureView.getBitmap();
|
|
|
|
if (bitmap != null) {
|
|
|
|
lastBitmap = Bitmap.createScaledBitmap(textureView.getBitmap(), 80, 80, true);
|
|
|
|
if (lastBitmap != null) {
|
|
|
|
Utilities.blurBitmap(lastBitmap, 7, 1, lastBitmap.getWidth(), lastBitmap.getHeight(), lastBitmap.getRowBytes());
|
|
|
|
try {
|
|
|
|
File file = new File(ApplicationLoader.getFilesDirFixed(), "icthumb.jpg");
|
|
|
|
FileOutputStream stream = new FileOutputStream(file);
|
|
|
|
lastBitmap.compress(Bitmap.CompressFormat.JPEG, 87, stream);
|
|
|
|
} catch (Throwable ignore) {
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-03-31 01:58:05 +02:00
|
|
|
/**
 * Cancels an in-progress round-video recording: stops playback (if previewing),
 * shuts down the camera render thread, deletes the partially recorded file and
 * plays the hide animation.
 */
public void cancel() {
    stopProgressTimer();
    if (videoPlayer != null) {
        videoPlayer.releasePlayer(true);
        videoPlayer = null;
    }
    // No texture view means the camera UI was never shown — nothing more to undo.
    if (textureView == null) {
        return;
    }
    cancelled = true;
    recording = false;
    AndroidUtilities.cancelRunOnUIThread(timerRunnable);
    // 0 — notify observers the recording stopped without a send action.
    NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.recordStopped, 0);
    if (cameraThread != null) {
        // Keep a blurred snapshot of the last frame before tearing the thread down.
        saveLastCameraBitmap();
        cameraThread.shutdown(0);
        cameraThread = null;
    }
    // Remove the partially written video file.
    if (cameraFile != null) {
        cameraFile.delete();
        cameraFile = null;
    }
    startAnimation(false);
}
|
|
|
|
|
2018-07-30 04:07:02 +02:00
|
|
|
@Keep
|
2017-03-31 01:58:05 +02:00
|
|
|
@Override
|
|
|
|
public void setAlpha(float alpha) {
|
|
|
|
ColorDrawable colorDrawable = (ColorDrawable) getBackground();
|
2017-07-08 18:32:04 +02:00
|
|
|
colorDrawable.setAlpha((int) (0xc0 * alpha));
|
|
|
|
invalidate();
|
|
|
|
}
|
|
|
|
|
|
|
|
/** Returns the camera-switch button so callers can position/animate it. */
public View getSwitchButtonView() {
    return switchCameraButton;
}
|
|
|
|
|
|
|
|
/** Returns the mute indicator image view. */
public View getMuteImageView() {
    return muteImageView;
}
|
|
|
|
|
|
|
|
/** Returns the paint used by this view (shared with external drawing code). */
public Paint getPaint() {
    return paint;
}
|
|
|
|
|
|
|
|
/**
 * Tears down the camera UI: destroys the camera (optionally asynchronously),
 * detaches the texture view and resets container/overlay translations.
 *
 * @param async whether the destroy should run asynchronously
 */
public void hideCamera(boolean async) {
    destroy(async, null);
    cameraContainer.removeView(textureView);
    cameraContainer.setTranslationX(0);
    cameraContainer.setTranslationY(0);
    textureOverlayView.setTranslationX(0);
    textureOverlayView.setTranslationY(0);
    // Dropped so a later show recreates it from scratch.
    textureView = null;
}
|
|
|
|
|
|
|
|
/**
 * Switches between front and back camera. Shows a blurred snapshot of the last
 * frame as an overlay to hide the transition, destroys the current session,
 * and re-initializes the camera thread for the other lens.
 */
private void switchCamera() {
    saveLastCameraBitmap();
    if (lastBitmap != null) {
        // Cover the texture with the blurred last frame while the new camera opens.
        textureOverlayView.setImageBitmap(lastBitmap);
        textureOverlayView.animate().setDuration(120).alpha(1.0f).setInterpolator(new DecelerateInterpolator()).start();
    }
    if (cameraSession != null) {
        cameraSession.destroy();
        CameraController.getInstance().close(cameraSession, null, null);
        cameraSession = null;
    }
    isFrontface = !isFrontface;
    initCamera();
    cameraReady = false;
    cameraThread.reinitForNewCamera();
}
|
|
|
|
|
|
|
|
/**
 * Picks a camera matching {@code isFrontface} (falling back to any available
 * camera) and chooses preview/picture sizes, preferring a pair whose widths
 * match so preview and capture agree.
 *
 * @return false if no cameras are available, true otherwise
 */
private boolean initCamera() {
    ArrayList<CameraInfo> cameraInfos = CameraController.getInstance().getCameras();
    if (cameraInfos == null) {
        return false;
    }
    CameraInfo notFrontface = null;
    for (int a = 0; a < cameraInfos.size(); a++) {
        CameraInfo cameraInfo = cameraInfos.get(a);
        if (!cameraInfo.isFrontface()) {
            notFrontface = cameraInfo;
        }
        // Take the first camera whose facing matches the requested one.
        if (isFrontface && cameraInfo.isFrontface() || !isFrontface && !cameraInfo.isFrontface()) {
            selectedCamera = cameraInfo;
            break;
        } else {
            // Remember any non-matching camera as a fallback.
            notFrontface = cameraInfo;
        }
    }
    if (selectedCamera == null) {
        selectedCamera = notFrontface;
    }
    if (selectedCamera == null) {
        return false;
    }

    ArrayList<Size> previewSizes = selectedCamera.getPreviewSizes();
    ArrayList<Size> pictureSizes = selectedCamera.getPictureSizes();
    previewSize = CameraController.chooseOptimalSize(previewSizes, 480, 270, aspectRatio);
    pictureSize = CameraController.chooseOptimalSize(pictureSizes, 480, 270, aspectRatio);
    // If the chosen preview and picture widths disagree, search for a
    // preview/picture pair with identical dimensions.
    if (previewSize.mWidth != pictureSize.mWidth) {
        boolean found = false;
        // First pass: a pair at least as large as the chosen picture size.
        for (int a = previewSizes.size() - 1; a >= 0; a--) {
            Size preview = previewSizes.get(a);
            for (int b = pictureSizes.size() - 1; b >= 0; b--) {
                Size picture = pictureSizes.get(b);
                if (preview.mWidth >= pictureSize.mWidth && preview.mHeight >= pictureSize.mHeight && preview.mWidth == picture.mWidth && preview.mHeight == picture.mHeight) {
                    previewSize = preview;
                    pictureSize = picture;
                    found = true;
                    break;
                }
            }
            if (found) {
                break;
            }
        }

        // Second pass: relax the lower bound to 240x240.
        if (!found) {
            for (int a = previewSizes.size() - 1; a >= 0; a--) {
                Size preview = previewSizes.get(a);
                for (int b = pictureSizes.size() - 1; b >= 0; b--) {
                    Size picture = pictureSizes.get(b);
                    if (preview.mWidth >= 240 && preview.mHeight >= 240 && preview.mWidth == picture.mWidth && preview.mHeight == picture.mHeight) {
                        previewSize = preview;
                        pictureSize = picture;
                        found = true;
                        break;
                    }
                }
                if (found) {
                    break;
                }
            }
        }
    }
    if (BuildVars.LOGS_ENABLED) {
        FileLog.d("preview w = " + previewSize.mWidth + " h = " + previewSize.mHeight);
    }
    return true;
}
|
|
|
|
|
|
|
|
/**
 * Creates and opens a round-video camera session on the UI thread for the
 * given surface texture. No-op if the camera thread was already torn down.
 *
 * @param surfaceTexture the GL-backed surface the camera renders into
 */
private void createCamera(final SurfaceTexture surfaceTexture) {
    AndroidUtilities.runOnUIThread(() -> {
        if (cameraThread == null) {
            return;
        }
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("create camera session");
        }

        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        cameraSession = new CameraSession(selectedCamera, previewSize, pictureSize, ImageFormat.JPEG);
        cameraThread.setCurrentSession(cameraSession);
        CameraController.getInstance().openRound(cameraSession, surfaceTexture, () -> {
            // Open callback: mark the session as initialized if it still exists.
            if (cameraSession != null) {
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.d("camera initied");
                }
                cameraSession.setInitied();
            }
        }, () -> cameraThread.setCurrentSession(cameraSession));
    });
}
|
|
|
|
|
|
|
|
private int loadShader(int type, String shaderCode) {
|
|
|
|
int shader = GLES20.glCreateShader(type);
|
|
|
|
GLES20.glShaderSource(shader, shaderCode);
|
|
|
|
GLES20.glCompileShader(shader);
|
|
|
|
int[] compileStatus = new int[1];
|
|
|
|
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
|
|
|
|
if (compileStatus[0] == 0) {
|
2018-07-30 04:07:02 +02:00
|
|
|
if (BuildVars.LOGS_ENABLED) {
|
|
|
|
FileLog.e(GLES20.glGetShaderInfoLog(shader));
|
|
|
|
}
|
2017-07-08 18:32:04 +02:00
|
|
|
GLES20.glDeleteShader(shader);
|
|
|
|
shader = 0;
|
|
|
|
}
|
|
|
|
return shader;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Polls playback position during trimmed-video preview; see startProgressTimer().
private Timer progressTimer;

/**
 * Starts (or restarts) a ~60fps timer that loops trimmed-video preview
 * playback: when the player passes the trim end time it seeks back to the
 * trim start (or 0).
 */
private void startProgressTimer() {
    // Cancel any previous timer so only one loop-watcher runs at a time.
    if (progressTimer != null) {
        try {
            progressTimer.cancel();
            progressTimer = null;
        } catch (Exception e) {
            FileLog.e(e);
        }
    }
    progressTimer = new Timer();
    progressTimer.schedule(new TimerTask() {
        @Override
        public void run() {
            // Player access must happen on the UI thread.
            AndroidUtilities.runOnUIThread(() -> {
                try {
                    if (videoPlayer != null && videoEditedInfo != null && videoEditedInfo.endTime > 0 && videoPlayer.getCurrentPosition() >= videoEditedInfo.endTime) {
                        videoPlayer.seekTo(videoEditedInfo.startTime > 0 ? videoEditedInfo.startTime : 0);
                    }
                } catch (Exception e) {
                    FileLog.e(e);
                }
            });
        }
    }, 0, 17); // 17 ms ≈ one 60 Hz frame
}
|
|
|
|
|
|
|
|
private void stopProgressTimer() {
|
|
|
|
if (progressTimer != null) {
|
|
|
|
try {
|
|
|
|
progressTimer.cancel();
|
|
|
|
progressTimer = null;
|
|
|
|
} catch (Exception e) {
|
|
|
|
FileLog.e(e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Dedicated GL render thread for the round-video camera preview. Owns the EGL
 * context/surface bound to the on-screen SurfaceTexture, renders each camera
 * frame with an external-OES shader, and feeds frames to the VideoRecorder.
 * All work is driven by messages (render / shutdown / reinit / set-session).
 */
public class CameraGLThread extends DispatchQueue {

    private final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
    private final int EGL_OPENGL_ES2_BIT = 4;
    // The on-screen SurfaceTexture the EGL window surface is created on.
    private SurfaceTexture surfaceTexture;
    private EGL10 egl10;
    private EGLDisplay eglDisplay;
    private EGLConfig eglConfig;
    private EGLContext eglContext;
    private EGLSurface eglSurface;
    private GL gl;
    // True once initGL() succeeded; onDraw() is a no-op before that.
    private boolean initied;

    private CameraSession currentSession;

    // SurfaceTexture backed by the external-OES camera texture.
    private SurfaceTexture cameraSurface;

    private final int DO_RENDER_MESSAGE = 0;
    private final int DO_SHUTDOWN_MESSAGE = 1;
    private final int DO_REINIT_MESSAGE = 2;
    private final int DO_SETSESSION_MESSAGE = 3;

    // On-screen draw program and its attribute/uniform locations.
    private int drawProgram;
    private int vertexMatrixHandle;
    private int textureMatrixHandle;
    private int positionHandle;
    private int textureHandle;

    private int rotationAngle;

    // Whether the encoder has been started from onDraw().
    private boolean recording;

    // Incremented on every camera re-init; lets stale render messages be identified.
    private Integer cameraId = 0;

    private VideoRecorder videoEncoder;

    public CameraGLThread(SurfaceTexture surface, int surfaceWidth, int surfaceHeight) {
        super("CameraGLThread");
        surfaceTexture = surface;

        int width = previewSize.getWidth();
        int height = previewSize.getHeight();

        // Scale the preview so its smaller side fills the surface, then derive
        // the crop factors (scaleX/scaleY) for the longer side.
        float scale = surfaceWidth / (float) Math.min(width, height);

        width *= scale;
        height *= scale;
        if (width > height) {
            scaleX = 1.0f;
            scaleY = width / (float) surfaceHeight;
        } else {
            scaleX = height / (float) surfaceWidth;
            scaleY = 1.0f;
        }
    }

    /**
     * Sets up EGL (display, ES2 context, window surface), compiles the screen
     * shader program, creates the external-OES camera texture and opens the
     * camera on it. Returns false (after finish()) on any failure.
     */
    private boolean initGL() {
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("start init gl");
        }
        egl10 = (EGL10) EGLContext.getEGL();

        eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
        if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("eglGetDisplay failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
            }
            finish();
            return false;
        }

        int[] version = new int[2];
        if (!egl10.eglInitialize(eglDisplay, version)) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("eglInitialize failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
            }
            finish();
            return false;
        }

        // RGB888, no alpha/depth/stencil — preview rendering only.
        int[] configsCount = new int[1];
        EGLConfig[] configs = new EGLConfig[1];
        int[] configSpec = new int[] {
                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 0,
                EGL10.EGL_DEPTH_SIZE, 0,
                EGL10.EGL_STENCIL_SIZE, 0,
                EGL10.EGL_NONE
        };
        if (!egl10.eglChooseConfig(eglDisplay, configSpec, configs, 1, configsCount)) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("eglChooseConfig failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
            }
            finish();
            return false;
        } else if (configsCount[0] > 0) {
            eglConfig = configs[0];
        } else {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("eglConfig not initialized");
            }
            finish();
            return false;
        }

        int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
        eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
        if (eglContext == null) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("eglCreateContext failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
            }
            finish();
            return false;
        }

        if (surfaceTexture instanceof SurfaceTexture) {
            eglSurface = egl10.eglCreateWindowSurface(eglDisplay, eglConfig, surfaceTexture, null);
        } else {
            finish();
            return false;
        }

        if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("createWindowSurface failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
            }
            finish();
            return false;
        }
        if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("eglMakeCurrent failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
            }
            finish();
            return false;
        }
        gl = eglContext.getGL();

        // Texture coordinates centered at 0.5 and shrunk by the crop factors,
        // so the preview is center-cropped to the circle's aspect.
        float tX = 1.0f / scaleX / 2.0f;
        float tY = 1.0f / scaleY / 2.0f;
        float[] verticesData = {
                -1.0f, -1.0f, 0,
                1.0f, -1.0f, 0,
                -1.0f, 1.0f, 0,
                1.0f, 1.0f, 0
        };
        float[] texData = {
                0.5f - tX, 0.5f - tY,
                0.5f + tX, 0.5f - tY,
                0.5f - tX, 0.5f + tY,
                0.5f + tX, 0.5f + tY
        };

        videoEncoder = new VideoRecorder();

        vertexBuffer = ByteBuffer.allocateDirect(verticesData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
        vertexBuffer.put(verticesData).position(0);

        textureBuffer = ByteBuffer.allocateDirect(texData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
        textureBuffer.put(texData).position(0);

        android.opengl.Matrix.setIdentityM(mSTMatrix, 0);

        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SCREEN_SHADER);
        if (vertexShader != 0 && fragmentShader != 0) {
            drawProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(drawProgram, vertexShader);
            GLES20.glAttachShader(drawProgram, fragmentShader);
            GLES20.glLinkProgram(drawProgram);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(drawProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] == 0) {
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.e("failed link shader");
                }
                GLES20.glDeleteProgram(drawProgram);
                drawProgram = 0;
            } else {
                positionHandle = GLES20.glGetAttribLocation(drawProgram, "aPosition");
                textureHandle = GLES20.glGetAttribLocation(drawProgram, "aTextureCoord");
                vertexMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uMVPMatrix");
                textureMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uSTMatrix");
            }
        } else {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.e("failed creating shader");
            }
            finish();
            return false;
        }

        // External-OES texture the camera renders into.
        GLES20.glGenTextures(1, cameraTexture, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        android.opengl.Matrix.setIdentityM(mMVPMatrix, 0);

        cameraSurface = new SurfaceTexture(cameraTexture[0]);
        // Every new camera frame triggers a render message on this thread.
        cameraSurface.setOnFrameAvailableListener(surfaceTexture -> requestRender());
        createCamera(cameraSurface);
        if (BuildVars.LOGS_ENABLED) {
            FileLog.e("gl initied");
        }

        return true;
    }

    /** Asks the thread to recreate the camera texture/session (after a lens switch). */
    public void reinitForNewCamera() {
        Handler handler = getHandler();
        if (handler != null) {
            sendMessage(handler.obtainMessage(DO_REINIT_MESSAGE), 0);
        }
    }

    /** Releases EGL surface, context and display (safe to call on partial init). */
    public void finish() {
        if (eglSurface != null) {
            egl10.eglMakeCurrent(eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
            egl10.eglDestroySurface(eglDisplay, eglSurface);
            eglSurface = null;
        }
        if (eglContext != null) {
            egl10.eglDestroyContext(eglDisplay, eglContext);
            eglContext = null;
        }
        if (eglDisplay != null) {
            egl10.eglTerminate(eglDisplay);
            eglDisplay = null;
        }
    }

    /** Posts the active CameraSession to this thread. */
    public void setCurrentSession(CameraSession session) {
        Handler handler = getHandler();
        if (handler != null) {
            sendMessage(handler.obtainMessage(DO_SETSESSION_MESSAGE, session), 0);
        }
    }

    /**
     * Renders one camera frame to screen and forwards it to the encoder.
     * Starts the encoder lazily on the first frame.
     *
     * @param cameraId id captured when the render message was posted
     */
    private void onDraw(Integer cameraId) {
        if (!initied) {
            return;
        }

        // Rebind our context/surface if another context became current.
        if (!eglContext.equals(egl10.eglGetCurrentContext()) || !eglSurface.equals(egl10.eglGetCurrentSurface(EGL10.EGL_DRAW))) {
            if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.e("eglMakeCurrent failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
                }
                return;
            }
        }
        cameraSurface.updateTexImage();

        if (!recording) {
            // First frame: start the encoder sharing this EGL context, and swap
            // crop factors for 90/270-degree orientations.
            videoEncoder.startRecording(cameraFile, EGL14.eglGetCurrentContext());
            recording = true;
            int orientation = currentSession.getCurrentOrientation();
            if (orientation == 90 || orientation == 270) {
                float temp = scaleX;
                scaleX = scaleY;
                scaleY = temp;
            }
        }

        videoEncoder.frameAvailable(cameraSurface, cameraId, System.nanoTime());

        cameraSurface.getTransformMatrix(mSTMatrix);

        GLES20.glUseProgram(drawProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]);

        // 3 floats per vertex (stride 12), 2 floats per tex coord (stride 8).
        GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer);
        GLES20.glEnableVertexAttribArray(positionHandle);

        GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
        GLES20.glEnableVertexAttribArray(textureHandle);

        GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, mSTMatrix, 0);
        GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mMVPMatrix, 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(textureHandle);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glUseProgram(0);

        egl10.eglSwapBuffers(eglDisplay, eglSurface);
    }

    @Override
    public void run() {
        // GL must be initialized on this thread before the message loop starts.
        initied = initGL();
        super.run();
    }

    @Override
    public void handleMessage(Message inputMessage) {
        int what = inputMessage.what;

        switch (what) {
            case DO_RENDER_MESSAGE:
                onDraw((Integer) inputMessage.obj);
                break;
            case DO_SHUTDOWN_MESSAGE:
                finish();
                if (recording) {
                    // arg1 carries the "send" action chosen by the caller.
                    videoEncoder.stopRecording(inputMessage.arg1);
                }
                Looper looper = Looper.myLooper();
                if (looper != null) {
                    looper.quit();
                }
                break;
            case DO_REINIT_MESSAGE: {
                if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                    if (BuildVars.LOGS_ENABLED) {
                        FileLog.d("eglMakeCurrent failed " + GLUtils.getEGLErrorString(egl10.eglGetError()));
                    }
                    return;
                }

                // Keep the old texture (for cross-fade) and release the old surface.
                if (cameraSurface != null) {
                    cameraSurface.getTransformMatrix(moldSTMatrix);
                    cameraSurface.setOnFrameAvailableListener(null);
                    cameraSurface.release();
                    oldCameraTexture[0] = cameraTexture[0];
                    cameraTextureAlpha = 0.0f;
                    cameraTexture[0] = 0;
                }
                cameraId++;
                cameraReady = false;

                GLES20.glGenTextures(1, cameraTexture, 0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

                cameraSurface = new SurfaceTexture(cameraTexture[0]);
                cameraSurface.setOnFrameAvailableListener(surfaceTexture -> requestRender());
                createCamera(cameraSurface);
                break;
            }
            case DO_SETSESSION_MESSAGE: {
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.d("set gl rednderer session");
                }
                CameraSession newSession = (CameraSession) inputMessage.obj;
                if (currentSession == newSession) {
                    // Same session posted again: refresh the rotation matrix.
                    rotationAngle = currentSession.getWorldAngle();
                    android.opengl.Matrix.setIdentityM(mMVPMatrix, 0);
                    if (rotationAngle != 0) {
                        android.opengl.Matrix.rotateM(mMVPMatrix, 0, rotationAngle, 0, 0, 1);
                    }
                } else {
                    currentSession = newSession;
                }
                break;
            }
        }
    }

    /**
     * Asks the thread to stop: releases GL and, if recording, stops the encoder.
     *
     * @param send action forwarded to the encoder (0 = discard)
     */
    public void shutdown(int send) {
        Handler handler = getHandler();
        if (handler != null) {
            sendMessage(handler.obtainMessage(DO_SHUTDOWN_MESSAGE, send, 0), 0);
        }
    }

    /** Posts a render message for the current cameraId. */
    public void requestRender() {
        Handler handler = getHandler();
        if (handler != null) {
            sendMessage(handler.obtainMessage(DO_RENDER_MESSAGE, cameraId), 0);
        }
    }
}
|
|
|
|
|
|
|
|
// Message codes dispatched to the encoder thread via EncoderHandler.
private static final int MSG_START_RECORDING = 0;
private static final int MSG_STOP_RECORDING = 1;
private static final int MSG_VIDEOFRAME_AVAILABLE = 2;
private static final int MSG_AUDIOFRAME_AVAILABLE = 3;
|
|
|
|
|
|
|
|
/**
 * Handler running on the encoder thread's looper. Holds the VideoRecorder via
 * a WeakReference (static class + weak ref avoids leaking the outer view
 * through the message queue) and dispatches the MSG_* commands to it.
 */
private static class EncoderHandler extends Handler {
    private WeakReference<VideoRecorder> mWeakEncoder;

    public EncoderHandler(VideoRecorder encoder) {
        mWeakEncoder = new WeakReference<>(encoder);
    }

    @Override
    public void handleMessage(Message inputMessage) {
        int what = inputMessage.what;
        Object obj = inputMessage.obj;

        VideoRecorder encoder = mWeakEncoder.get();
        // Encoder already collected — nothing to do.
        if (encoder == null) {
            return;
        }

        switch (what) {
            case MSG_START_RECORDING: {
                try {
                    if (BuildVars.LOGS_ENABLED) {
                        FileLog.e("start encoder");
                    }
                    encoder.prepareEncoder();
                } catch (Exception e) {
                    // Failed to start: log, finalize with "discard" and stop the looper.
                    FileLog.e(e);
                    encoder.handleStopRecording(0);
                    Looper.myLooper().quit();
                }
                break;
            }
            case MSG_STOP_RECORDING: {
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.e("stop encoder");
                }
                // arg1 = send action chosen by the UI.
                encoder.handleStopRecording(inputMessage.arg1);
                break;
            }
            case MSG_VIDEOFRAME_AVAILABLE: {
                // Timestamp was split into two ints to fit Message args; recombine.
                long timestamp = (((long) inputMessage.arg1) << 32) | (((long) inputMessage.arg2) & 0xffffffffL);
                Integer cameraId = (Integer) inputMessage.obj;
                encoder.handleVideoFrameAvailable(timestamp, cameraId);
                break;
            }
            case MSG_AUDIOFRAME_AVAILABLE: {
                encoder.handleAudioFrameAvailable((AudioBufferInfo) inputMessage.obj);
                break;
            }
        }
    }

    /** Quits the looper of the thread this is called on. */
    public void exit() {
        Looper.myLooper().quit();
    }
}
|
|
|
|
|
|
|
|
/**
 * Batch of recorded PCM audio: up to 10 chunks of 2048 bytes, each with a
 * presentation-time offset and the number of bytes actually read.
 */
private class AudioBufferInfo {
    // 10 slots of 2048 bytes each.
    byte[] buffer = new byte[2048 * 10];
    // Presentation time (microseconds, per the recorder loop) for each slot.
    long[] offset = new long[10];
    // Bytes actually read into each slot.
    int[] read = new int[10];
    // Number of filled slots.
    int results;
    int lastWroteBuffer;
    // True when this is the final buffer of the recording.
    boolean last;
}
|
|
|
|
|
|
|
|
private class VideoRecorder implements Runnable {
|
|
|
|
|
|
|
|
// H.264 video in an MP4 container with AAC audio.
private static final String VIDEO_MIME_TYPE = "video/avc";
private static final String AUDIO_MIME_TYPE = "audio/mp4a-latm";
private static final int FRAME_RATE = 30;
private static final int IFRAME_INTERVAL = 1;

// Output file and encoder configuration (set in startRecording).
private File videoFile;
private int videoWidth;
private int videoHeight;
private int videoBitrate;
private boolean videoConvertFirstWrite = true;
private boolean blendEnabled;

// Encoder-side EGL state; the context is shared with the render thread.
private Surface surface;
private android.opengl.EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
private android.opengl.EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
private android.opengl.EGLContext sharedEglContext;
private android.opengl.EGLConfig eglConfig;
private android.opengl.EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;

private MediaCodec videoEncoder;
private MediaCodec audioEncoder;

private MediaCodec.BufferInfo videoBufferInfo;
private MediaCodec.BufferInfo audioBufferInfo;
private MP4Builder mediaMuxer;
// Audio buffers queued for muxing.
private ArrayList<AudioBufferInfo> buffersToWrite = new ArrayList<>();
// -5 marks "track not added yet".
private int videoTrackIndex = -5;
private int audioTrackIndex = -5;

private long lastCommitedFrameTime;
private long audioStartTime = -1;

private long currentTimestamp = 0;
private long lastTimestamp = -1;

// Created on the encoder thread; volatile so other threads see it after startup.
private volatile EncoderHandler handler;

// Guards the ready/running startup handshake (see startRecording/run).
private final Object sync = new Object();
private boolean ready;
private volatile boolean running;
private volatile int sendWhenDone;
private long skippedTime;
private boolean skippedFirst;

// Audio/video synchronization bookkeeping (-1 = not yet seen).
private long desyncTime;
private long videoFirst = -1;
private long videoLast;
private long audioFirst = -1;
private boolean audioStopedByTime;

// Encoder-side draw program and its attribute/uniform locations.
private int drawProgram;
private int vertexMatrixHandle;
private int textureMatrixHandle;
private int positionHandle;
private int textureHandle;
private int scaleXHandle;
private int scaleYHandle;
private int alphaHandle;
// Counts consecutive zero SurfaceTexture timestamps (see frameAvailable).
private int zeroTimeStamps;

private Integer lastCameraId = 0;

private AudioRecord audioRecorder;

// Pool of reusable audio buffers exchanged with the recorder thread.
private ArrayBlockingQueue<AudioBufferInfo> buffers = new ArrayBlockingQueue<>(10);
|
|
|
|
|
|
|
|
/**
 * Audio capture loop. Reads PCM from audioRecorder in 2048-byte chunks,
 * stamps each chunk with a derived presentation time, and ships filled
 * AudioBufferInfo batches to the encoder thread. Exits when {@code running}
 * is cleared, then releases the recorder and posts MSG_STOP_RECORDING.
 */
private Runnable recorderRunnable = new Runnable() {

    @Override
    public void run() {
        long audioPresentationTimeUs = -1;
        int readResult;
        boolean done = false;
        while (!done) {
            // Recording was stopped from outside: stop the AudioRecord, but keep
            // draining remaining data when a send action is pending.
            if (!running && audioRecorder.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
                try {
                    audioRecorder.stop();
                } catch (Exception e) {
                    done = true;
                }
                if (sendWhenDone == 0) {
                    break;
                }
            }
            // Reuse a pooled buffer when available, otherwise allocate.
            AudioBufferInfo buffer;
            if (buffers.isEmpty()) {
                buffer = new AudioBufferInfo();
            } else {
                buffer = buffers.poll();
            }
            buffer.lastWroteBuffer = 0;
            buffer.results = 10;
            for (int a = 0; a < 10; a++) {
                // First chunk anchors the presentation clock to wall time.
                if (audioPresentationTimeUs == -1) {
                    audioPresentationTimeUs = System.nanoTime() / 1000;
                }
                readResult = audioRecorder.read(buffer.buffer, a * 2048, 2048);
                if (readResult <= 0) {
                    buffer.results = a;
                    if (!running) {
                        buffer.last = true;
                    }
                    break;
                }
                buffer.offset[a] = audioPresentationTimeUs;
                buffer.read[a] = readResult;
                // Advance by chunk duration: 44100 Hz, 16-bit mono (2 bytes/sample).
                int bufferDurationUs = 1000000 * readResult / 44100 / 2;
                audioPresentationTimeUs += bufferDurationUs;
            }
            if (buffer.results >= 0 || buffer.last) {
                if (!running && buffer.results < 10) {
                    done = true;
                }
                handler.sendMessage(handler.obtainMessage(MSG_AUDIOFRAME_AVAILABLE, buffer));
            } else {
                if (!running) {
                    done = true;
                } else {
                    // Nothing read: return the buffer to the pool.
                    try {
                        buffers.put(buffer);
                    } catch (Exception ignore) {
                    }
                }
            }
        }
        try {
            audioRecorder.release();
        } catch (Exception e) {
            FileLog.e(e);
        }
        handler.sendMessage(handler.obtainMessage(MSG_STOP_RECORDING, sendWhenDone, 0));
    }
};
|
|
|
|
|
2018-07-30 04:07:02 +02:00
|
|
|
/**
 * Starts the encoder thread and blocks until its handler is ready, then posts
 * MSG_START_RECORDING. Picks 320px/600kbps on a couple of Samsung devices
 * (zeroflte/zenlte), 240px/400kbps otherwise.
 *
 * @param outputFile    destination video file
 * @param sharedContext render thread's EGL context, shared with the encoder
 */
public void startRecording(File outputFile, android.opengl.EGLContext sharedContext) {
    int resolution;
    int bitrate;
    String model = Build.DEVICE;
    if (model == null) {
        model = "";
    }
    // Device-specific workaround: these models get a larger resolution/bitrate.
    if (model.startsWith("zeroflte") || model.startsWith("zenlte")) {
        resolution = 320;
        bitrate = 600000;
    } else {
        resolution = 240;
        bitrate = 400000;
    }

    videoFile = outputFile;
    videoWidth = resolution;
    videoHeight = resolution; // square output (round video)
    videoBitrate = bitrate;
    sharedEglContext = sharedContext;

    synchronized (sync) {
        if (running) {
            return;
        }
        running = true;
        Thread thread = new Thread(this, "TextureMovieEncoder");
        thread.setPriority(Thread.MAX_PRIORITY);
        thread.start();
        // Wait for run() to create the handler and signal readiness.
        while (!ready) {
            try {
                sync.wait();
            } catch (InterruptedException ie) {
                // ignore
            }
        }
    }
    handler.sendMessage(handler.obtainMessage(MSG_START_RECORDING));
}
|
|
|
|
|
|
|
|
public void stopRecording(int send) {
|
|
|
|
handler.sendMessage(handler.obtainMessage(MSG_STOP_RECORDING, send, 0));
|
|
|
|
}
|
|
|
|
|
2017-12-08 18:35:59 +01:00
|
|
|
/**
 * Called when the camera SurfaceTexture has a new frame. Forwards the frame timestamp
 * (split into two ints) plus the camera id to the encoder thread as MSG_VIDEOFRAME_AVAILABLE.
 * Drops frames that arrive before the encoder thread is ready.
 *
 * @param st                the camera's SurfaceTexture
 * @param cameraId          id of the camera that produced the frame (boxed so it can ride
 *                          in the Message's obj field)
 * @param timestampInternal fallback timestamp used when the SurfaceTexture reports 0
 */
public void frameAvailable(SurfaceTexture st, Integer cameraId, long timestampInternal) {
    synchronized (sync) {
        if (!ready) {
            return;
        }
    }

    long timestamp = st.getTimestamp();
    if (timestamp == 0) {
        // Some devices report a zero timestamp. The first zero frame is dropped;
        // from the second consecutive zero on, fall back to the caller-supplied timestamp.
        zeroTimeStamps++;
        if (zeroTimeStamps > 1) {
            if (BuildVars.LOGS_ENABLED) {
                FileLog.d("fix timestamp enabled");
            }
            timestamp = timestampInternal;
        } else {
            return;
        }
    } else {
        zeroTimeStamps = 0;
    }

    // Pack the 64-bit timestamp into arg1 (high 32 bits) and arg2 (low 32 bits).
    handler.sendMessage(handler.obtainMessage(MSG_VIDEOFRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, cameraId));
}
|
|
|
|
|
|
|
|
/**
 * Encoder thread entry point. Prepares a Looper, publishes the handler under the sync
 * monitor (so startRecording's wait loop can observe {@code ready}), and then loops
 * processing encoder messages until handler.exit() quits the Looper.
 */
@Override
public void run() {
    Looper.prepare();
    synchronized (sync) {
        // Handler must exist before ready is set — startRecording uses it right after waking.
        handler = new EncoderHandler(this);
        ready = true;
        sync.notify();
    }
    Looper.loop();

    synchronized (sync) {
        ready = false;
    }
}
|
|
|
|
|
|
|
|
/**
 * Runs on the encoder thread for each filled audio buffer coming from the recorder thread.
 * Queues PCM data into the AAC encoder, aligning the audio stream with the first video
 * frame: audio captured before the first video frame is discarded, and a large gap between
 * the two clocks is remembered as {@code desyncTime}.
 *
 * @param input one audio buffer from the capture thread; may be swapped for an older
 *              pending buffer from {@code buffersToWrite} below
 */
private void handleAudioFrameAvailable(AudioBufferInfo input) {
    if (audioStopedByTime) {
        // Audio was already cut off at the video end — drop everything that still arrives.
        return;
    }
    buffersToWrite.add(input);
    if (audioFirst == -1) {
        if (videoFirst == -1) {
            // Can't align yet: keep the buffer queued until the first video frame is seen.
            if (BuildVars.LOGS_ENABLED) {
                FileLog.d("video record not yet started");
            }
            return;
        }
        // Scan queued buffers for the first audio chunk at or after the first video frame.
        while (true) {
            boolean ok = false;
            for (int a = 0; a < input.results; a++) {
                // Gap over 10s between clocks: treat as desync, keep the offset and start anyway.
                if (a == 0 && Math.abs(videoFirst - input.offset[a]) > 10000000L) {
                    desyncTime = videoFirst - input.offset[a];
                    audioFirst = input.offset[a];
                    ok = true;
                    if (BuildVars.LOGS_ENABLED) {
                        FileLog.d("detected desync between audio and video " + desyncTime);
                    }
                    break;
                }
                if (input.offset[a] >= videoFirst) {
                    input.lastWroteBuffer = a;
                    audioFirst = input.offset[a];
                    ok = true;
                    if (BuildVars.LOGS_ENABLED) {
                        FileLog.d("found first audio frame at " + a + " timestamp = " + input.offset[a]);
                    }
                    break;
                } else {
                    if (BuildVars.LOGS_ENABLED) {
                        FileLog.d("ignore first audio frame at " + a + " timestamp = " + input.offset[a]);
                    }
                }
            }
            if (!ok) {
                // Whole buffer predates the video start — discard it and try the next queued one.
                if (BuildVars.LOGS_ENABLED) {
                    FileLog.d("first audio frame not found, removing buffers " + input.results);
                }
                buffersToWrite.remove(input);
            } else {
                break;
            }
            if (!buffersToWrite.isEmpty()) {
                input = buffersToWrite.get(0);
            } else {
                return;
            }
        }
    }

    if (audioStartTime == -1) {
        audioStartTime = input.offset[input.lastWroteBuffer];
    }
    // Always drain the oldest pending buffer first to keep the stream ordered.
    if (buffersToWrite.size() > 1) {
        input = buffersToWrite.get(0);
    }
    try {
        drainEncoder(false);
    } catch (Exception e) {
        FileLog.e(e);
    }
    try {
        boolean isLast = false;
        while (input != null) {
            int inputBufferIndex = audioEncoder.dequeueInputBuffer(0);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer;
                if (Build.VERSION.SDK_INT >= 21) {
                    inputBuffer = audioEncoder.getInputBuffer(inputBufferIndex);
                } else {
                    ByteBuffer[] inputBuffers = audioEncoder.getInputBuffers();
                    inputBuffer = inputBuffers[inputBufferIndex];
                    inputBuffer.clear();
                }
                long startWriteTime = input.offset[input.lastWroteBuffer];
                // Copy as many 2048-byte chunks as fit into this codec input buffer.
                // Loop runs to a == input.results so the "buffer fully consumed" branch
                // below is evaluated even after the last chunk.
                for (int a = input.lastWroteBuffer; a <= input.results; a++) {
                    if (a < input.results) {
                        // Recording stopped and this chunk is past the last video frame
                        // (adjusted for measured desync) — end the audio stream here.
                        if (!running && input.offset[a] >= videoLast - desyncTime) {
                            if (BuildVars.LOGS_ENABLED) {
                                FileLog.d("stop audio encoding because of stoped video recording at " + input.offset[a] + " last video " + videoLast);
                            }
                            audioStopedByTime = true;
                            isLast = true;
                            input = null;
                            buffersToWrite.clear();
                            break;
                        }
                        if (inputBuffer.remaining() < input.read[a]) {
                            // Codec buffer full: remember where we stopped, continue on the
                            // next dequeued input buffer.
                            input.lastWroteBuffer = a;
                            input = null;
                            break;
                        }
                        inputBuffer.put(input.buffer, a * 2048, input.read[a]);
                    }
                    if (a >= input.results - 1) {
                        // This capture buffer is exhausted — recycle it to the recorder
                        // thread (only while still recording) and move to the next queued one.
                        buffersToWrite.remove(input);
                        if (running) {
                            buffers.put(input);
                        }
                        if (!buffersToWrite.isEmpty()) {
                            input = buffersToWrite.get(0);
                        } else {
                            isLast = input.last;
                            input = null;
                            break;
                        }
                    }
                }
                // Presentation time is relative to the first written audio sample.
                audioEncoder.queueInputBuffer(inputBufferIndex, 0, inputBuffer.position(), startWriteTime == 0 ? 0 : startWriteTime - audioStartTime, isLast ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            }
        }
    } catch (Throwable e) {
        FileLog.e(e);
    }
}
|
|
|
|
|
|
|
|
/**
 * Runs on the encoder thread for each camera frame. Drains pending encoder output,
 * computes the frame's presentation time, draws the camera texture(s) onto the codec
 * input surface via GL, and swaps the EGL buffers to submit the frame. While switching
 * cameras, the old camera's last texture is cross-faded out over ~200ms.
 *
 * @param timestampNanos frame timestamp in nanoseconds (from SurfaceTexture or fallback)
 * @param cameraId       id of the camera that produced the frame; a change resets timing
 */
private void handleVideoFrameAvailable(long timestampNanos, Integer cameraId) {
    try {
        drainEncoder(false);
    } catch (Exception e) {
        FileLog.e(e);
    }
    long dt, alphaDt;
    if (!lastCameraId.equals(cameraId)) {
        // Camera switched: its timestamps are on a different clock, so restart deltas.
        lastTimestamp = -1;
        lastCameraId = cameraId;
    }
    if (lastTimestamp == -1) {
        lastTimestamp = timestampNanos;
        if (currentTimestamp != 0) {
            // First frame after a camera switch: bridge the gap with wall-clock time
            // (ms -> ns), but don't advance the cross-fade alpha for it.
            dt = (System.currentTimeMillis() - lastCommitedFrameTime) * 1000000;
            alphaDt = 0;
        } else {
            alphaDt = dt = 0;
        }
    } else {
        alphaDt = dt = (timestampNanos - lastTimestamp);
        lastTimestamp = timestampNanos;
    }
    lastCommitedFrameTime = System.currentTimeMillis();
    if (!skippedFirst) {
        // Skip the first ~200ms of frames (presumably to let exposure/focus settle —
        // TODO confirm intent).
        skippedTime += dt;
        if (skippedTime < 200000000) {
            return;
        }
        skippedFirst = true;
    }
    currentTimestamp += dt;
    if (videoFirst == -1) {
        // Stored in microseconds; audio alignment in handleAudioFrameAvailable compares
        // against this value.
        videoFirst = timestampNanos / 1000;
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("first video frame was at " + videoFirst);
        }
    }
    videoLast = timestampNanos;

    // Draw the camera texture(s) into the encoder's input surface.
    GLES20.glUseProgram(drawProgram);
    GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer);
    GLES20.glEnableVertexAttribArray(positionHandle);
    GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
    GLES20.glEnableVertexAttribArray(textureHandle);
    GLES20.glUniform1f(scaleXHandle, scaleX);
    GLES20.glUniform1f(scaleYHandle, scaleY);
    GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mMVPMatrix, 0);

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    if (oldCameraTexture[0] != 0) {
        // Cross-fade: draw the previous camera's frame fully opaque underneath,
        // then blend the new camera's frame on top with increasing alpha.
        if (!blendEnabled) {
            GLES20.glEnable(GLES20.GL_BLEND);
            blendEnabled = true;
        }
        GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, moldSTMatrix, 0);
        GLES20.glUniform1f(alphaHandle, 1.0f);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oldCameraTexture[0]);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
    GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, mSTMatrix, 0);
    GLES20.glUniform1f(alphaHandle, cameraTextureAlpha);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    GLES20.glDisableVertexAttribArray(positionHandle);
    GLES20.glDisableVertexAttribArray(textureHandle);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    GLES20.glUseProgram(0);

    // Stamp the frame's presentation time and submit it to the video encoder.
    EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, currentTimestamp);
    EGL14.eglSwapBuffers(eglDisplay, eglSurface);

    if (oldCameraTexture[0] != 0 && cameraTextureAlpha < 1.0f) {
        // Advance the cross-fade; 200000000ns = 200ms total fade duration.
        cameraTextureAlpha += alphaDt / 200000000.0f;
        if (cameraTextureAlpha > 1) {
            GLES20.glDisable(GLES20.GL_BLEND);
            blendEnabled = false;
            cameraTextureAlpha = 1;
            GLES20.glDeleteTextures(1, oldCameraTexture, 0);
            oldCameraTexture[0] = 0;
            if (!cameraReady) {
                cameraReady = true;
                // Fade out the UI overlay once the camera picture is live.
                AndroidUtilities.runOnUIThread(() -> textureOverlayView.animate().setDuration(120).alpha(0.0f).setInterpolator(new DecelerateInterpolator()).start());
            }
        }
    } else if (!cameraReady) {
        cameraReady = true;
        AndroidUtilities.runOnUIThread(() -> textureOverlayView.animate().setDuration(120).alpha(0.0f).setInterpolator(new DecelerateInterpolator()).start());
    }
}
|
|
|
|
|
|
|
|
/**
 * Runs on the encoder thread to finish a recording. If capture is still active it only
 * flags it to stop (the capture loop re-posts MSG_STOP_RECORDING when done — see
 * sendWhenDone). Otherwise it drains and releases both codecs, finalizes the MP4,
 * then either hands the result to the UI (preview/send) or deletes the file, and
 * finally tears down the EGL state and quits the encoder Looper.
 *
 * @param send 0 = discard the recording; 1 = send immediately; any other non-zero value
 *             = show the preview player
 */
private void handleStopRecording(final int send) {
    if (running) {
        // Capture threads still running: remember the request and let them wind down first.
        sendWhenDone = send;
        running = false;
        return;
    }
    try {
        drainEncoder(true);
    } catch (Exception e) {
        FileLog.e(e);
    }
    if (videoEncoder != null) {
        try {
            videoEncoder.stop();
            videoEncoder.release();
            videoEncoder = null;
        } catch (Exception e) {
            FileLog.e(e);
        }
    }
    if (audioEncoder != null) {
        try {
            audioEncoder.stop();
            audioEncoder.release();
            audioEncoder = null;
        } catch (Exception e) {
            FileLog.e(e);
        }
    }
    if (mediaMuxer != null) {
        try {
            mediaMuxer.finishMovie();
        } catch (Exception e) {
            FileLog.e(e);
        }
    }
    if (send != 0) {
        AndroidUtilities.runOnUIThread(() -> {
            // Describe the finished round video for the send/preview pipeline.
            videoEditedInfo = new VideoEditedInfo();
            videoEditedInfo.roundVideo = true;
            videoEditedInfo.startTime = -1;
            videoEditedInfo.endTime = -1;
            videoEditedInfo.file = file;
            videoEditedInfo.encryptedFile = encryptedFile;
            videoEditedInfo.key = key;
            videoEditedInfo.iv = iv;
            videoEditedInfo.estimatedSize = Math.max(1, size);
            videoEditedInfo.framerate = 25;
            // NOTE(review): hard-coded 240x240 even though startRecording may have used
            // 320x320 on some devices — confirm whether this mismatch is intentional.
            videoEditedInfo.resultWidth = videoEditedInfo.originalWidth = 240;
            videoEditedInfo.resultHeight = videoEditedInfo.originalHeight = 240;
            videoEditedInfo.originalPath = videoFile.getAbsolutePath();
            if (send == 1) {
                // Send right away.
                baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, videoFile.getAbsolutePath(), 0, true), videoEditedInfo);
            } else {
                // Show a looping, muted preview so the user can confirm or discard.
                videoPlayer = new VideoPlayer();
                videoPlayer.setDelegate(new VideoPlayer.VideoPlayerDelegate() {
                    @Override
                    public void onStateChanged(boolean playWhenReady, int playbackState) {
                        if (videoPlayer == null) {
                            return;
                        }
                        // Loop playback: restart from the trim start when the clip ends.
                        if (videoPlayer.isPlaying() && playbackState == ExoPlayer.STATE_ENDED) {
                            videoPlayer.seekTo(videoEditedInfo.startTime > 0 ? videoEditedInfo.startTime : 0);
                        }
                    }

                    @Override
                    public void onError(Exception e) {
                        FileLog.e(e);
                    }

                    @Override
                    public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {
                    }

                    @Override
                    public void onRenderedFirstFrame() {
                    }

                    @Override
                    public boolean onSurfaceDestroyed(SurfaceTexture surfaceTexture) {
                        return false;
                    }

                    @Override
                    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {

                    }
                });
                videoPlayer.setTextureView(textureView);
                videoPlayer.preparePlayer(Uri.fromFile(videoFile), "other");
                videoPlayer.play();
                videoPlayer.setMute(true);
                startProgressTimer();

                // Fade out recording chrome, fade in the mute indicator.
                AnimatorSet animatorSet = new AnimatorSet();
                animatorSet.playTogether(
                        ObjectAnimator.ofFloat(switchCameraButton, "alpha", 0.0f),
                        ObjectAnimator.ofInt(paint, "alpha", 0),
                        ObjectAnimator.ofFloat(muteImageView, "alpha", 1.0f));
                animatorSet.setDuration(180);
                animatorSet.setInterpolator(new DecelerateInterpolator());
                animatorSet.start();
                videoEditedInfo.estimatedDuration = duration;
                NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.audioDidSent, videoEditedInfo, videoFile.getAbsolutePath());
            }
            // Flush the final chunk to the streaming uploader.
            didWriteData(videoFile, 0, true);
        });
    } else {
        // Discarded: cancel any in-flight upload of this file and remove it.
        FileLoader.getInstance(currentAccount).cancelUploadFile(videoFile.getAbsolutePath(), false);
        videoFile.delete();
    }
    // Tear down EGL state created in prepareEncoder.
    EGL14.eglDestroySurface(eglDisplay, eglSurface);
    eglSurface = EGL14.EGL_NO_SURFACE;
    if (surface != null) {
        surface.release();
        surface = null;
    }
    if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
        EGL14.eglMakeCurrent(eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        EGL14.eglDestroyContext(eglDisplay, eglContext);
        EGL14.eglReleaseThread();
        EGL14.eglTerminate(eglDisplay);
    }
    eglDisplay = EGL14.EGL_NO_DISPLAY;
    eglContext = EGL14.EGL_NO_CONTEXT;
    eglConfig = null;
    // Quits the encoder thread's Looper, ending run().
    handler.exit();
}
|
|
|
|
|
|
|
|
/**
 * Runs on the encoder thread in response to MSG_START_RECORDING. Sets up the whole
 * capture pipeline: AudioRecord + capture thread, AAC audio encoder, AVC video encoder
 * with a surface input, the MP4 muxer, and finally an EGL context/surface bound to the
 * video encoder's input surface plus the GL program used to draw camera frames into it.
 * Any codec/recorder setup failure is rethrown as a RuntimeException.
 */
private void prepareEncoder() {
    try {
        // Audio capture: mono 16-bit PCM at 44.1kHz.
        int recordBufferSize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (recordBufferSize <= 0) {
            // getMinBufferSize reports an error code — fall back to a fixed size.
            recordBufferSize = 3584;
        }
        // Buffer is a multiple of 2048 bytes; the capture loop reads in 2048-byte chunks.
        int bufferSize = 2048 * 24;
        if (bufferSize < recordBufferSize) {
            bufferSize = ((recordBufferSize / 2048) + 1) * 2048 * 2;
        }
        // Pool of reusable capture buffers cycled between recorder and encoder threads.
        for (int a = 0; a < 3; a++) {
            buffers.add(new AudioBufferInfo());
        }
        audioRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
        audioRecorder.startRecording();
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("initied audio record with channels " + audioRecorder.getChannelCount() + " sample rate = " + audioRecorder.getSampleRate() + " bufferSize = " + bufferSize);
        }
        Thread thread = new Thread(recorderRunnable);
        thread.setPriority(Thread.MAX_PRIORITY);
        thread.start();

        audioBufferInfo = new MediaCodec.BufferInfo();
        videoBufferInfo = new MediaCodec.BufferInfo();

        // AAC-LC, mono, 32kbps.
        MediaFormat audioFormat = new MediaFormat();
        audioFormat.setString(MediaFormat.KEY_MIME, AUDIO_MIME_TYPE);
        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        audioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 32000);
        audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 2048 * 10);

        audioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
        audioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        audioEncoder.start();

        videoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);

        MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, videoWidth, videoHeight);

        // Surface input: frames are rendered via GL, not fed as buffers.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, videoBitrate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        /*if (Build.VERSION.SDK_INT >= 21) {
            format.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
            if (Build.VERSION.SDK_INT >= 23) {
                format.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel5);
            }
        }*/

        videoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // createInputSurface must be called between configure() and start().
        surface = videoEncoder.createInputSurface();
        videoEncoder.start();

        Mp4Movie movie = new Mp4Movie();
        movie.setCacheFile(videoFile);
        movie.setRotation(0);
        movie.setSize(videoWidth, videoHeight);
        mediaMuxer = new MP4Builder().createMovie(movie, isSecretChat);

        // Notify UI: haptic tick, lock orientation, start the record timer.
        AndroidUtilities.runOnUIThread(() -> {
            if (cancelled) {
                return;
            }
            try {
                performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING);
            } catch (Exception ignore) {
                // haptics are best-effort
            }
            AndroidUtilities.lockOrientation(baseFragment.getParentActivity());
            recording = true;
            recordStartTime = System.currentTimeMillis();
            AndroidUtilities.runOnUIThread(timerRunnable);
            NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.recordStarted);
        });
    } catch (Exception ioe) {
        throw new RuntimeException(ioe);
    }

    if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
        throw new RuntimeException("EGL already set up");
    }

    eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
        throw new RuntimeException("unable to get EGL14 display");
    }
    int[] version = new int[2];
    if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
        eglDisplay = null;
        throw new RuntimeException("unable to initialize EGL14");
    }

    if (eglContext == EGL14.EGL_NO_CONTEXT) {
        int renderableType = EGL14.EGL_OPENGL_ES2_BIT;

        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, renderableType,
                // 0x3142 — presumably EGL_RECORDABLE_ANDROID (not a named EGL14 constant);
                // marks the config as usable with a MediaCodec input surface.
                0x3142, 1,
                EGL14.EGL_NONE
        };
        android.opengl.EGLConfig[] configs = new android.opengl.EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
            throw new RuntimeException("Unable to find a suitable EGLConfig");
        }

        int[] attrib2_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        // Share with the camera preview context so its texture is visible here.
        eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], sharedEglContext, attrib2_list, 0);
        eglConfig = configs[0];
    }

    int[] values = new int[1];
    EGL14.eglQueryContext(eglDisplay, eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);

    if (eglSurface != EGL14.EGL_NO_SURFACE) {
        throw new IllegalStateException("surface already created");
    }

    int[] surfaceAttribs = {
            EGL14.EGL_NONE
    };
    // Window surface backed by the video encoder's input surface.
    eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
    if (eglSurface == null) {
        throw new RuntimeException("surface was null");
    }

    if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
        if (BuildVars.LOGS_ENABLED) {
            FileLog.e("eglMakeCurrent failed " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        throw new RuntimeException("eglMakeCurrent failed");
    }
    GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);

    // Compile and link the draw program; on success cache attrib/uniform locations.
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
    if (vertexShader != 0 && fragmentShader != 0) {
        drawProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(drawProgram, vertexShader);
        GLES20.glAttachShader(drawProgram, fragmentShader);
        GLES20.glLinkProgram(drawProgram);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(drawProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] == 0) {
            GLES20.glDeleteProgram(drawProgram);
            drawProgram = 0;
        } else {
            positionHandle = GLES20.glGetAttribLocation(drawProgram, "aPosition");
            textureHandle = GLES20.glGetAttribLocation(drawProgram, "aTextureCoord");
            scaleXHandle = GLES20.glGetUniformLocation(drawProgram, "scaleX");
            scaleYHandle = GLES20.glGetUniformLocation(drawProgram, "scaleY");
            alphaHandle = GLES20.glGetUniformLocation(drawProgram, "alpha");
            vertexMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uMVPMatrix");
            textureMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uSTMatrix");
        }
    }
}
|
|
|
|
|
|
|
|
public Surface getInputSurface() {
|
|
|
|
return surface;
|
|
|
|
}
|
|
|
|
|
2019-03-03 21:40:48 +01:00
|
|
|
private void didWriteData(File file, long availableSize, boolean last) {
|
2017-07-08 18:32:04 +02:00
|
|
|
if (videoConvertFirstWrite) {
|
2018-07-30 04:07:02 +02:00
|
|
|
FileLoader.getInstance(currentAccount).uploadFile(file.toString(), isSecretChat, false, 1, ConnectionsManager.FileTypeVideo);
|
2017-07-08 18:32:04 +02:00
|
|
|
videoConvertFirstWrite = false;
|
2018-07-30 04:07:02 +02:00
|
|
|
if (last) {
|
2019-03-03 21:40:48 +01:00
|
|
|
FileLoader.getInstance(currentAccount).checkUploadNewDataAvailable(file.toString(), isSecretChat, availableSize, last ? file.length() : 0);
|
2018-07-30 04:07:02 +02:00
|
|
|
}
|
2017-07-08 18:32:04 +02:00
|
|
|
} else {
|
2019-03-03 21:40:48 +01:00
|
|
|
FileLoader.getInstance(currentAccount).checkUploadNewDataAvailable(file.toString(), isSecretChat, availableSize, last ? file.length() : 0);
|
2017-07-08 18:32:04 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Pulls all pending output from the video encoder and then the audio encoder, feeding
 * samples to the MP4 muxer and notifying the uploader via didWriteData. Tracks are
 * added to the muxer lazily: either from INFO_OUTPUT_FORMAT_CHANGED, or (video only)
 * by hand-parsing the codec-config buffer into SPS/PPS when no format change arrived.
 *
 * @param endOfStream true to signal end-of-stream to the video encoder and block
 *                    until both encoders are fully drained
 * @throws Exception muxer/codec failures propagate to the caller
 */
public void drainEncoder(boolean endOfStream) throws Exception {
    if (endOfStream) {
        videoEncoder.signalEndOfInputStream();
    }

    // Pre-21 codecs expose output via a buffer array that must be re-fetched on change.
    ByteBuffer[] encoderOutputBuffers = null;
    if (Build.VERSION.SDK_INT < 21) {
        encoderOutputBuffers = videoEncoder.getOutputBuffers();
    }
    // --- Video drain loop ---
    while (true) {
        int encoderStatus = videoEncoder.dequeueOutputBuffer(videoBufferInfo, 10000);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (!endOfStream) {
                break;
            }
            // endOfStream: keep spinning until BUFFER_FLAG_END_OF_STREAM arrives.
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (Build.VERSION.SDK_INT < 21) {
                encoderOutputBuffers = videoEncoder.getOutputBuffers();
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = videoEncoder.getOutputFormat();
            // -5 is the "track not yet added" sentinel used throughout this class.
            if (videoTrackIndex == -5) {
                videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
            }
        } else if (encoderStatus >= 0) {
            ByteBuffer encodedData;
            if (Build.VERSION.SDK_INT < 21) {
                encodedData = encoderOutputBuffers[encoderStatus];
            } else {
                encodedData = videoEncoder.getOutputBuffer(encoderStatus);
            }
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if (videoBufferInfo.size > 1) {
                if ((videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    // Regular sample: mux it and tell the uploader about any new bytes.
                    long availableSize = mediaMuxer.writeSampleData(videoTrackIndex, encodedData, videoBufferInfo, true);
                    if (availableSize != 0) {
                        didWriteData(videoFile, availableSize, false);
                    }
                } else if (videoTrackIndex == -5) {
                    // Codec-config buffer and no track yet: split it into SPS/PPS by
                    // scanning backwards for the last 00 00 00 01 start code, which
                    // presumably begins the PPS NAL — TODO confirm for all encoders.
                    byte[] csd = new byte[videoBufferInfo.size];
                    encodedData.limit(videoBufferInfo.offset + videoBufferInfo.size);
                    encodedData.position(videoBufferInfo.offset);
                    encodedData.get(csd);
                    ByteBuffer sps = null;
                    ByteBuffer pps = null;
                    for (int a = videoBufferInfo.size - 1; a >= 0; a--) {
                        if (a > 3) {
                            if (csd[a] == 1 && csd[a - 1] == 0 && csd[a - 2] == 0 && csd[a - 3] == 0) {
                                sps = ByteBuffer.allocate(a - 3);
                                pps = ByteBuffer.allocate(videoBufferInfo.size - (a - 3));
                                sps.put(csd, 0, a - 3).position(0);
                                pps.put(csd, a - 3, videoBufferInfo.size - (a - 3)).position(0);
                                break;
                            }
                        } else {
                            break;
                        }
                    }

                    MediaFormat newFormat = MediaFormat.createVideoFormat("video/avc", videoWidth, videoHeight);
                    if (sps != null && pps != null) {
                        newFormat.setByteBuffer("csd-0", sps);
                        newFormat.setByteBuffer("csd-1", pps);
                    }
                    videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
                }
            }
            videoEncoder.releaseOutputBuffer(encoderStatus, false);
            if ((videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        }
    }

    if (Build.VERSION.SDK_INT < 21) {
        encoderOutputBuffers = audioEncoder.getOutputBuffers();
    }
    // NOTE(review): this local is never read anywhere in the method — dead variable.
    boolean encoderOutputAvailable = true;
    // --- Audio drain loop ---
    while (true) {
        int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 0);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // Stop when not finishing, or when recording fully stopped with nothing
            // pending to send (sendWhenDone == 0).
            if (!endOfStream || !running && sendWhenDone == 0) {
                break;
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (Build.VERSION.SDK_INT < 21) {
                encoderOutputBuffers = audioEncoder.getOutputBuffers();
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = audioEncoder.getOutputFormat();
            if (audioTrackIndex == -5) {
                audioTrackIndex = mediaMuxer.addTrack(newFormat, true);
            }
        } else if (encoderStatus >= 0) {
            ByteBuffer encodedData;
            if (Build.VERSION.SDK_INT < 21) {
                encodedData = encoderOutputBuffers[encoderStatus];
            } else {
                encodedData = audioEncoder.getOutputBuffer(encoderStatus);
            }
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // Audio codec config is not muxed as a sample; zero the size so the
                // write below is skipped.
                audioBufferInfo.size = 0;
            }
            if (audioBufferInfo.size != 0) {
                long availableSize = mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo, false);
                if (availableSize != 0) {
                    didWriteData(videoFile, availableSize, false);
                }
            }
            audioEncoder.releaseOutputBuffer(encoderStatus, false);
            if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        }
    }
}
|
|
|
|
|
|
|
|
/**
 * Last-resort EGL cleanup if the recorder is garbage-collected without a proper
 * stop: releases the EGL context/display if they are still live. Mirrors the
 * teardown in handleStopRecording.
 */
@Override
protected void finalize() throws Throwable {
    try {
        if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglMakeCurrent(eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
            EGL14.eglDestroyContext(eglDisplay, eglContext);
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(eglDisplay);
            eglDisplay = EGL14.EGL_NO_DISPLAY;
            eglContext = EGL14.EGL_NO_CONTEXT;
            eglConfig = null;
        }
    } finally {
        // Always chain to Object.finalize.
        super.finalize();
    }
}
|
|
|
|
}
|
|
|
|
}
|