Upgrade CameraX to Beta01.

This commit is contained in:
Alex Hart 2020-03-25 09:30:13 -03:00 committed by Greyson Parrelli
parent 951a61117a
commit d46894e5db
22 changed files with 1817 additions and 1011 deletions

View File

@ -269,8 +269,10 @@ dependencies {
implementation 'androidx.lifecycle:lifecycle-extensions:2.1.0'
implementation 'androidx.lifecycle:lifecycle-viewmodel-savedstate:1.0.0-alpha05'
implementation 'androidx.lifecycle:lifecycle-common-java8:2.1.0'
implementation "androidx.camera:camera-core:1.0.0-alpha06"
implementation "androidx.camera:camera-camera2:1.0.0-alpha06"
implementation "androidx.camera:camera-core:1.0.0-beta01"
implementation "androidx.camera:camera-camera2:1.0.0-beta01"
implementation "androidx.camera:camera-lifecycle:1.0.0-beta01"
implementation "androidx.concurrent:concurrent-futures:1.0.0"
implementation('com.google.firebase:firebase-messaging:17.3.4') {
exclude group: 'com.google.firebase', module: 'firebase-core'

View File

@ -3,7 +3,7 @@
xmlns:tools="http://schemas.android.com/tools"
package="org.thoughtcrime.securesms">
<uses-sdk tools:overrideLibrary="androidx.camera.core,androidx.camera.camera2"/>
<uses-sdk tools:overrideLibrary="androidx.camera.core,androidx.camera.camera2,androidx.camera.lifecycle" />
<permission android:name="org.thoughtcrime.securesms.ACCESS_SECRETS"
android:label="Access to TextSecure Secrets"

View File

@ -19,7 +19,7 @@ package org.thoughtcrime.securesms;
import android.annotation.SuppressLint;
import androidx.appcompat.app.AppCompatDelegate;
import androidx.camera.camera2.Camera2AppConfig;
import androidx.camera.camera2.Camera2Config;
import androidx.camera.core.CameraX;
import androidx.lifecycle.DefaultLifecycleObserver;
import androidx.lifecycle.LifecycleOwner;
@ -386,7 +386,7 @@ public class ApplicationContext extends MultiDexApplication implements DefaultLi
if (CameraXUtil.isSupported()) {
new Thread(() -> {
try {
CameraX.init(this, Camera2AppConfig.create(this));
CameraX.initialize(this, Camera2Config.defaultConfig());
} catch (Throwable t) {
Log.w(TAG, "Failed to initialize CameraX.");
}

View File

@ -44,7 +44,6 @@ public class CameraButtonView extends View {
private boolean isRecordingVideo;
private float progressPercent = 0f;
private float latestIncrement = 0f;
private @NonNull CameraButtonMode cameraButtonMode = CameraButtonMode.IMAGE;
private @Nullable VideoCaptureListener videoCaptureListener;
@ -247,7 +246,6 @@ public class CameraButtonView extends View {
int action = event.getAction();
switch (action) {
case MotionEvent.ACTION_DOWN:
latestIncrement = 0f;
if (isEnabled()) {
startAnimation(shrinkAnimation);
}
@ -258,11 +256,6 @@ public class CameraButtonView extends View {
float deltaY = Math.abs(event.getY() - deadzoneRect.top);
float increment = Math.min(1f, deltaY / maxRange);
if (Math.abs(increment - latestIncrement) < MINIMUM_ALLOWED_ZOOM_STEP) {
break;
}
latestIncrement = increment;
notifyZoomPercent(ZOOM_INTERPOLATOR.getInterpolation(increment));
invalidate();
}

View File

@ -22,8 +22,9 @@ import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.core.CameraX;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.ImageProxy;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.Fragment;
@ -204,7 +205,9 @@ public class CameraXFragment extends Fragment implements CameraFragment {
onCaptureClicked();
});
if (camera.hasCameraWithLensFacing(CameraX.LensFacing.FRONT) && camera.hasCameraWithLensFacing(CameraX.LensFacing.BACK)) {
camera.setScaleType(CameraXView.ScaleType.CENTER_INSIDE);
if (camera.hasCameraWithLensFacing(CameraSelector.LENS_FACING_FRONT) && camera.hasCameraWithLensFacing(CameraSelector.LENS_FACING_BACK)) {
flipButton.setVisibility(View.VISIBLE);
flipButton.setOnClickListener(v -> {
camera.toggleCamera();
@ -361,15 +364,15 @@ public class CameraXFragment extends Fragment implements CameraFragment {
selfieFlash
);
camera.takePicture(Executors.mainThreadExecutor(), new ImageCapture.OnImageCapturedListener() {
camera.takePicture(Executors.mainThreadExecutor(), new ImageCapture.OnImageCapturedCallback() {
@Override
public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {
public void onCaptureSuccess(ImageProxy image) {
flashHelper.endFlash();
SimpleTask.run(CameraXFragment.this.getViewLifecycleOwner().getLifecycle(), () -> {
stopwatch.split("captured");
try {
return CameraXUtil.toJpeg(image, rotationDegrees, camera.getCameraLensFacing() == CameraX.LensFacing.FRONT);
return CameraXUtil.toJpeg(image, camera.getCameraLensFacing() == CameraSelector.LENS_FACING_FRONT);
} catch (IOException e) {
return null;
} finally {
@ -388,7 +391,7 @@ public class CameraXFragment extends Fragment implements CameraFragment {
}
@Override
public void onError(ImageCapture.ImageCaptureError useCaseError, String message, @Nullable Throwable cause) {
public void onError(ImageCaptureException exception) {
flashHelper.endFlash();
controller.onCameraError();
}

View File

@ -6,8 +6,9 @@ import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.CameraX;
import androidx.camera.core.FlashMode;
import androidx.camera.core.ImageCapture;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView;
@ -65,8 +66,10 @@ final class CameraXSelfieFlashHelper {
}
private boolean shouldUseViewBasedFlash() {
return camera.getFlash() == FlashMode.ON &&
Integer cameraLensFacing = camera.getCameraLensFacing();
return camera.getFlash() == ImageCapture.FLASH_MODE_ON &&
!camera.hasFlash() &&
camera.getCameraLensFacing() == CameraX.LensFacing.FRONT;
cameraLensFacing != null && cameraLensFacing == CameraSelector.LENS_FACING_BACK;
}
}

View File

@ -23,7 +23,6 @@ import org.thoughtcrime.securesms.mediasend.camerax.CameraXView;
import org.thoughtcrime.securesms.mediasend.camerax.VideoCapture;
import org.thoughtcrime.securesms.permissions.Permissions;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.util.Util;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.FileDescriptor;
@ -45,12 +44,12 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
private boolean isRecording;
private ValueAnimator cameraMetricsAnimator;
private final VideoCapture.OnVideoSavedListener videoSavedListener = new VideoCapture.OnVideoSavedListener() {
private final VideoCapture.OnVideoSavedCallback videoSavedListener = new VideoCapture.OnVideoSavedCallback() {
@Override
public void onVideoSaved(@NonNull FileDescriptor fileDescriptor) {
try {
isRecording = false;
camera.setZoomLevel(0f);
camera.setZoomRatio(camera.getMinZoomRatio());
memoryFileDescriptor.seek(0);
callback.onVideoSaved(fileDescriptor);
} catch (IOException e) {
@ -59,13 +58,9 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
}
@Override
public void onError(@NonNull VideoCapture.VideoCaptureError videoCaptureError,
@NonNull String message,
@Nullable Throwable cause)
{
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
isRecording = false;
callback.onVideoError(cause);
Util.runOnMain(() -> resetCameraSizing());
}
};
@ -119,7 +114,7 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
}
private void beginCameraRecording() {
this.camera.setZoomLevel(0f);
this.camera.setZoomRatio(this.camera.getMinZoomRatio());
callback.onVideoRecordStarted();
shrinkCaptureArea();
camera.startRecording(memoryFileDescriptor.getFileDescriptor(), Executors.mainThreadExecutor(), videoSavedListener);
@ -135,22 +130,24 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
if (scaleX == 1f) {
float targetHeightForAnimation = videoRecordingSize.getHeight() * scale;
if (screenSize.getHeight() == targetHeightForAnimation) {
return;
}
cameraMetricsAnimator = ValueAnimator.ofFloat(screenSize.getHeight(), targetHeightForAnimation);
} else {
if (screenSize.getWidth() == targetWidthForAnimation) {
return;
}
cameraMetricsAnimator = ValueAnimator.ofFloat(screenSize.getWidth(), targetWidthForAnimation);
}
ViewGroup.LayoutParams params = camera.getLayoutParams();
cameraMetricsAnimator.setInterpolator(new LinearInterpolator());
cameraMetricsAnimator.setDuration(200);
cameraMetricsAnimator.addListener(new AnimationEndCallback() {
@Override
public void onAnimationEnd(Animator animation) {
if (!isRecording) return;
scaleCameraViewToMatchRecordingSizeAndAspectRatio();
}
});
cameraMetricsAnimator.addUpdateListener(animation -> {
if (scaleX == 1f) {
params.height = Math.round((float) animation.getAnimatedValue());
@ -162,20 +159,6 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
cameraMetricsAnimator.start();
}
private void scaleCameraViewToMatchRecordingSizeAndAspectRatio() {
ViewGroup.LayoutParams layoutParams = camera.getLayoutParams();
Size videoRecordingSize = VideoUtil.getVideoRecordingSize();
float scale = getSurfaceScaleForRecording();
layoutParams.height = videoRecordingSize.getHeight();
layoutParams.width = videoRecordingSize.getWidth();
camera.setLayoutParams(layoutParams);
camera.setScaleX(scale);
camera.setScaleY(scale);
}
private Size getScreenSize() {
DisplayMetrics metrics = camera.getResources().getDisplayMetrics();
return new Size(metrics.widthPixels, metrics.heightPixels);
@ -187,16 +170,6 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
return Math.min(screenSize.getHeight(), screenSize.getWidth()) / (float) Math.min(videoRecordingSize.getHeight(), videoRecordingSize.getWidth());
}
private void resetCameraSizing() {
ViewGroup.LayoutParams layoutParams = camera.getLayoutParams();
layoutParams.width = ViewGroup.LayoutParams.MATCH_PARENT;
layoutParams.height = ViewGroup.LayoutParams.MATCH_PARENT;
camera.setLayoutParams(layoutParams);
camera.setScaleX(1);
camera.setScaleY(1);
}
@Override
public void onVideoCaptureComplete() {
isRecording = false;
@ -214,8 +187,8 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
@Override
public void onZoomIncremented(float increment) {
float range = camera.getMaxZoomLevel() - camera.getMinZoomLevel();
camera.setZoomLevel(range * increment);
float range = camera.getMaxZoomRatio() - camera.getMinZoomRatio();
camera.setZoomRatio((range * increment) + camera.getMinZoomRatio());
}
static MemoryFileDescriptor createFileDescriptor(@NonNull Context context) throws MemoryFileDescriptor.MemoryFileException {
@ -226,7 +199,7 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
);
}
private abstract class AnimationEndCallback implements Animator.AnimatorListener {
private static abstract class AnimationEndCallback implements Animator.AnimatorListener {
@Override
public final void onAnimationStart(Animator animation) {

View File

@ -5,10 +5,9 @@ import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.widget.AppCompatImageView;
import androidx.camera.core.FlashMode;
import androidx.camera.core.ImageCapture;
import org.thoughtcrime.securesms.R;
@ -43,7 +42,7 @@ public final class CameraXFlashToggleView extends AppCompatImageView {
public CameraXFlashToggleView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
super.setOnClickListener((v) -> setFlash(FLASH_MODES.get((flashIndex + 1) % FLASH_ENUM.length)));
super.setOnClickListener((v) -> setFlash(FLASH_MODES.get((flashIndex + 1) % FLASH_ENUM.length).getFlashMode()));
}
@Override
@ -61,10 +60,12 @@ public final class CameraXFlashToggleView extends AppCompatImageView {
public void setAutoFlashEnabled(boolean isAutoEnabled) {
supportsFlashModeAuto = isAutoEnabled;
setFlash(FLASH_MODES.get(flashIndex));
setFlash(FLASH_MODES.get(flashIndex).getFlashMode());
}
public void setFlash(@NonNull FlashMode flashMode) {
public void setFlash(@ImageCapture.FlashMode int mode) {
FlashMode flashMode = FlashMode.fromImageCaptureFlashMode(mode);
flashIndex = resolveFlashIndex(FLASH_MODES.indexOf(flashMode), supportsFlashModeAuto);
refreshDrawableState();
notifyListener();
@ -92,7 +93,7 @@ public final class CameraXFlashToggleView extends AppCompatImageView {
supportsFlashModeAuto = savedState.getBoolean(STATE_SUPPORT_AUTO);
setFlash(FLASH_MODES.get(
resolveFlashIndex(savedState.getInt(STATE_FLASH_INDEX), supportsFlashModeAuto))
resolveFlashIndex(savedState.getInt(STATE_FLASH_INDEX), supportsFlashModeAuto)).getFlashMode()
);
super.onRestoreInstanceState(savedState.getParcelable(STATE_PARENT));
@ -104,7 +105,7 @@ public final class CameraXFlashToggleView extends AppCompatImageView {
private void notifyListener() {
if (flashModeChangedListener == null) return;
flashModeChangedListener.flashModeChanged(FLASH_MODES.get(flashIndex));
flashModeChangedListener.flashModeChanged(FLASH_MODES.get(flashIndex).getFlashMode());
}
private static int resolveFlashIndex(int desiredFlashIndex, boolean supportsFlashModeAuto) {
@ -126,6 +127,33 @@ public final class CameraXFlashToggleView extends AppCompatImageView {
}
public interface OnFlashModeChangedListener {
void flashModeChanged(FlashMode flashMode);
void flashModeChanged(@ImageCapture.CaptureMode int flashMode);
}
/**
 * Internal mapping between this view's cycling flash states and the
 * {@link ImageCapture} FLASH_MODE_* integer constants introduced in
 * CameraX beta01 (which replaced the removed androidx.camera.core.FlashMode enum).
 */
private enum FlashMode {
  AUTO(ImageCapture.FLASH_MODE_AUTO),
  OFF(ImageCapture.FLASH_MODE_OFF),
  ON(ImageCapture.FLASH_MODE_ON);

  private final @ImageCapture.FlashMode int flashMode;

  FlashMode(@ImageCapture.FlashMode int flashMode) {
    this.flashMode = flashMode;
  }

  /** @return the CameraX FLASH_MODE_* constant backing this enum value. */
  @ImageCapture.FlashMode int getFlashMode() {
    return flashMode;
  }

  /**
   * Resolves a CameraX FLASH_MODE_* constant back to its enum value.
   *
   * @throws AssertionError if the constant does not map to any known mode,
   *         which indicates a programming error (an unsupported int was passed).
   */
  private static FlashMode fromImageCaptureFlashMode(@ImageCapture.FlashMode int flashMode) {
    for (FlashMode mode : values()) {
      if (mode.getFlashMode() == flashMode) {
        return mode;
      }
    }
    // Include the offending value so a crash report identifies the bad constant.
    throw new AssertionError("Unknown flash mode: " + flashMode);
  }
}
}

View File

@ -20,14 +20,6 @@ import android.Manifest.permission;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.os.Build;
import android.os.Looper;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
@ -36,41 +28,53 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.annotation.UiThread;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.CameraInfo;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraOrientationUtil;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.CameraX;
import androidx.camera.core.FlashMode;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureConfig;
import androidx.camera.core.ImageCapture.OnImageCapturedCallback;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
import androidx.camera.core.VideoCaptureConfig;
import androidx.camera.core.TorchState;
import androidx.camera.core.UseCase;
import androidx.camera.core.impl.CameraInternal;
import androidx.camera.core.impl.LensFacingConverter;
import androidx.camera.core.impl.VideoCaptureConfig;
import androidx.camera.core.impl.utils.CameraOrientationUtil;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.impl.utils.futures.FutureCallback;
import androidx.camera.core.impl.utils.futures.Futures;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.core.util.Preconditions;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleObserver;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.OnLifecycleEvent;
import com.google.common.util.concurrent.ListenableFuture;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.File;
import java.io.FileDescriptor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import static androidx.camera.core.ImageCapture.FLASH_MODE_OFF;
/** CameraX use case operation built on @{link androidx.camera.core}. */
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
final class CameraXModule {
public static final String TAG = "CameraXModule";
private static final int MAX_VIEW_DIMENSION = 2000;
private static final float UNITY_ZOOM_SCALE = 1f;
private static final float ZOOM_NOT_SUPPORTED = UNITY_ZOOM_SCALE;
private static final Rational ASPECT_RATIO_16_9 = new Rational(16, 9);
@ -78,22 +82,27 @@ final class CameraXModule {
private static final Rational ASPECT_RATIO_9_16 = new Rational(9, 16);
private static final Rational ASPECT_RATIO_3_4 = new Rational(3, 4);
private final CameraManager mCameraManager;
private final PreviewConfig.Builder mPreviewConfigBuilder;
private final Preview.Builder mPreviewBuilder;
private final VideoCaptureConfig.Builder mVideoCaptureConfigBuilder;
private final ImageCaptureConfig.Builder mImageCaptureConfigBuilder;
private final CameraXView mCameraView;
private final ImageCapture.Builder mImageCaptureBuilder;
private final CameraXView mCameraXView;
final AtomicBoolean mVideoIsRecording = new AtomicBoolean(false);
private CameraXView.CaptureMode mCaptureMode = CameraXView.CaptureMode.IMAGE;
private long mMaxVideoDuration = CameraXView.INDEFINITE_VIDEO_DURATION;
private long mMaxVideoSize = CameraXView.INDEFINITE_VIDEO_SIZE;
private FlashMode mFlash = FlashMode.OFF;
@ImageCapture.FlashMode
private int mFlash = FLASH_MODE_OFF;
@Nullable
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
Camera mCamera;
@Nullable
private ImageCapture mImageCapture;
@Nullable
private VideoCapture mVideoCapture;
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
@Nullable
Preview mPreview;
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
@Nullable
LifecycleOwner mCurrentLifecycle;
private final LifecycleObserver mCurrentLifecycleObserver =
@ -102,27 +111,44 @@ final class CameraXModule {
public void onDestroy(LifecycleOwner owner) {
if (owner == mCurrentLifecycle) {
clearCurrentLifecycle();
mPreview.removePreviewOutputListener();
mPreview.setSurfaceProvider(null);
}
}
};
@Nullable
private LifecycleOwner mNewLifecycle;
private float mZoomLevel = UNITY_ZOOM_SCALE;
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
@Nullable
private Rect mCropRegion;
Integer mCameraLensFacing = CameraSelector.LENS_FACING_BACK;
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
@Nullable
private CameraX.LensFacing mCameraLensFacing = CameraX.LensFacing.BACK;
ProcessCameraProvider mCameraProvider;
CameraXModule(CameraXView view) {
this.mCameraView = view;
mCameraXView = view;
mCameraManager = (CameraManager) view.getContext().getSystemService(Context.CAMERA_SERVICE);
Futures.addCallback(ProcessCameraProvider.getInstance(view.getContext()),
new FutureCallback<ProcessCameraProvider>() {
// TODO(b/124269166): Rethink how we can handle permissions here.
@SuppressLint("MissingPermission")
@Override
public void onSuccess(@Nullable ProcessCameraProvider provider) {
Preconditions.checkNotNull(provider);
mCameraProvider = provider;
if (mCurrentLifecycle != null) {
bindToLifecycle(mCurrentLifecycle);
}
}
mPreviewConfigBuilder = new PreviewConfig.Builder().setTargetName("Preview");
@Override
public void onFailure(Throwable t) {
throw new RuntimeException("CameraX failed to initialize.", t);
}
}, CameraXExecutors.mainThreadExecutor());
mImageCaptureConfigBuilder =
new ImageCaptureConfig.Builder().setTargetName("ImageCapture");
mPreviewBuilder = new Preview.Builder().setTargetName("Preview");
mImageCaptureBuilder = new ImageCapture.Builder().setTargetName("ImageCapture");
// Begin Signal Custom Code Block
mVideoCaptureConfigBuilder =
@ -132,42 +158,8 @@ final class CameraXModule {
.setBitRate(VideoUtil.VIDEO_BIT_RATE);
// End Signal Custom Code Block
}
/**
* Rescales view rectangle with dimensions in [-1000, 1000] to a corresponding rectangle in the
* sensor coordinate frame.
*/
private static Rect rescaleViewRectToSensorRect(Rect view, Rect sensor) {
// Scale width and height.
int newWidth = Math.round(view.width() * sensor.width() / (float) MAX_VIEW_DIMENSION);
int newHeight = Math.round(view.height() * sensor.height() / (float) MAX_VIEW_DIMENSION);
// Scale top/left corner.
int halfViewDimension = MAX_VIEW_DIMENSION / 2;
int leftOffset =
Math.round(
(view.left + halfViewDimension)
* sensor.width()
/ (float) MAX_VIEW_DIMENSION)
+ sensor.left;
int topOffset =
Math.round(
(view.top + halfViewDimension)
* sensor.height()
/ (float) MAX_VIEW_DIMENSION)
+ sensor.top;
// Now, produce the scaled rect.
Rect scaled = new Rect();
scaled.left = leftOffset;
scaled.top = topOffset;
scaled.right = scaled.left + newWidth;
scaled.bottom = scaled.top + newHeight;
return scaled;
}
@RequiresPermission(permission.CAMERA)
public void bindToLifecycle(LifecycleOwner lifecycleOwner) {
void bindToLifecycle(LifecycleOwner lifecycleOwner) {
mNewLifecycle = lifecycleOwner;
if (getMeasuredWidth() > 0 && getMeasuredHeight() > 0) {
@ -189,38 +181,33 @@ final class CameraXModule {
throw new IllegalArgumentException("Cannot bind to lifecycle in a destroyed state.");
}
final int cameraOrientation;
try {
Set<CameraX.LensFacing> available = getAvailableCameraLensFacing();
if (mCameraProvider == null) {
// try again once the camera provider is no longer null
return;
}
if (available.isEmpty()) {
Log.w(TAG, "Unable to bindToLifeCycle since no cameras available");
mCameraLensFacing = null;
}
Set<Integer> available = getAvailableCameraLensFacing();
// Ensure the current camera exists, or default to another camera
if (mCameraLensFacing != null && !available.contains(mCameraLensFacing)) {
Log.w(TAG, "Camera does not exist with direction " + mCameraLensFacing);
if (available.isEmpty()) {
Log.w(TAG, "Unable to bindToLifeCycle since no cameras available");
mCameraLensFacing = null;
}
// Default to the first available camera direction
mCameraLensFacing = available.iterator().next();
// Ensure the current camera exists, or default to another camera
if (mCameraLensFacing != null && !available.contains(mCameraLensFacing)) {
Log.w(TAG, "Camera does not exist with direction " + mCameraLensFacing);
Log.w(TAG, "Defaulting to primary camera with direction " + mCameraLensFacing);
}
// Default to the first available camera direction
mCameraLensFacing = available.iterator().next();
// Do not attempt to create use cases for a null cameraLensFacing. This could occur if
// the
// user explicitly sets the LensFacing to null, or if we determined there
// were no available cameras, which should be logged in the logic above.
if (mCameraLensFacing == null) {
return;
}
CameraInfo cameraInfo = CameraX.getCameraInfo(getLensFacing());
cameraOrientation = cameraInfo.getSensorRotationDegrees();
} catch (CameraInfoUnavailableException e) {
throw new IllegalStateException("Unable to get Camera Info.", e);
} catch (Exception e) {
throw new IllegalStateException("Unable to bind to lifecycle.", e);
Log.w(TAG, "Defaulting to primary camera with direction " + mCameraLensFacing);
}
// Do not attempt to create use cases for a null cameraLensFacing. This could occur if
// the user explicitly sets the LensFacing to null, or if we determined there
// were no available cameras, which should be logged in the logic above.
if (mCameraLensFacing == null) {
return;
}
// Set the preferred aspect ratio as 4:3 if it is IMAGE only mode. Set the preferred aspect
@ -230,23 +217,32 @@ final class CameraXModule {
boolean isDisplayPortrait = getDisplayRotationDegrees() == 0
|| getDisplayRotationDegrees() == 180;
// Begin Signal Custom Code Block
Rational targetAspectRatio;
// Begin Signal Custom Code Block
int resolution = CameraXUtil.getIdealResolution(Resources.getSystem().getDisplayMetrics().widthPixels, Resources.getSystem().getDisplayMetrics().heightPixels);
Log.i(TAG, "Ideal resolution: " + resolution);
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
mImageCaptureConfigBuilder.setTargetResolution(CameraXUtil.buildResolutionForRatio(resolution, ASPECT_RATIO_4_3, isDisplayPortrait));
targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_3_4 : ASPECT_RATIO_4_3;
} else {
mImageCaptureConfigBuilder.setTargetResolution(CameraXUtil.buildResolutionForRatio(resolution, ASPECT_RATIO_16_9, isDisplayPortrait));
targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_9_16 : ASPECT_RATIO_16_9;
}
mImageCaptureConfigBuilder.setCaptureMode(CameraXUtil.getOptimalCaptureMode());
mImageCaptureConfigBuilder.setLensFacing(mCameraLensFacing);
// End Signal Custom Code Block
mImageCaptureConfigBuilder.setTargetRotation(getDisplaySurfaceRotation());
mImageCapture = new ImageCapture(mImageCaptureConfigBuilder.build());
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
// mImageCaptureBuilder.setTargetAspectRatio(AspectRatio.RATIO_4_3);
// Begin Signal Custom Code Block
mImageCaptureBuilder.setTargetResolution(CameraXUtil.buildResolutionForRatio(resolution, ASPECT_RATIO_4_3, isDisplayPortrait));
// End Signal Custom Code Block
targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_3_4 : ASPECT_RATIO_4_3;
} else {
// Begin Signal Custom Code Block
mImageCaptureBuilder.setTargetResolution(CameraXUtil.buildResolutionForRatio(resolution, ASPECT_RATIO_16_9, isDisplayPortrait));
// End Signal Custom Code Block
// mImageCaptureBuilder.setTargetAspectRatio(AspectRatio.RATIO_16_9);
targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_9_16 : ASPECT_RATIO_16_9;
}
// Begin Signal Custom Code Block
mImageCaptureBuilder.setCaptureMode(CameraXUtil.getOptimalCaptureMode());
// End Signal Custom Code Block
mImageCaptureBuilder.setTargetRotation(getDisplaySurfaceRotation());
mImageCapture = mImageCaptureBuilder.build();
// Begin Signal Custom Code Block
Size size = VideoUtil.getVideoRecordingSize();
@ -255,46 +251,37 @@ final class CameraXModule {
// End Signal Custom Code Block
mVideoCaptureConfigBuilder.setTargetRotation(getDisplaySurfaceRotation());
mVideoCaptureConfigBuilder.setLensFacing(mCameraLensFacing);
// Begin Signal Custom Code Block
if (MediaConstraints.isVideoTranscodeAvailable()) {
mVideoCapture = new VideoCapture(mVideoCaptureConfigBuilder.build());
mVideoCapture = new VideoCapture(mVideoCaptureConfigBuilder.getUseCaseConfig());
}
mPreviewConfigBuilder.setLensFacing(mCameraLensFacing);
// End Signal Custom Code Block
// Adjusts the preview resolution according to the view size and the target aspect ratio.
int height = (int) (getMeasuredWidth() / targetAspectRatio.floatValue());
mPreviewConfigBuilder.setTargetResolution(new Size(getMeasuredWidth(), height));
mPreviewBuilder.setTargetResolution(new Size(getMeasuredWidth(), height));
mPreview = new Preview(mPreviewConfigBuilder.build());
mPreview.setOnPreviewOutputUpdateListener(
new Preview.OnPreviewOutputUpdateListener() {
@Override
public void onUpdated(@NonNull Preview.PreviewOutput output) {
boolean needReverse = cameraOrientation != 0 && cameraOrientation != 180;
int textureWidth =
needReverse
? output.getTextureSize().getHeight()
: output.getTextureSize().getWidth();
int textureHeight =
needReverse
? output.getTextureSize().getWidth()
: output.getTextureSize().getHeight();
CameraXModule.this.onPreviewSourceDimensUpdated(textureWidth,
textureHeight);
CameraXModule.this.setSurfaceTexture(output.getSurfaceTexture());
}
});
mPreview = mPreviewBuilder.build();
mPreview.setSurfaceProvider(mCameraXView.getPreviewView().getPreviewSurfaceProvider());
CameraSelector cameraSelector =
new CameraSelector.Builder().requireLensFacing(mCameraLensFacing).build();
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
CameraX.bindToLifecycle(mCurrentLifecycle, mImageCapture, mPreview);
mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector,
mImageCapture,
mPreview);
} else if (getCaptureMode() == CameraXView.CaptureMode.VIDEO) {
CameraX.bindToLifecycle(mCurrentLifecycle, mVideoCapture, mPreview);
mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector,
mVideoCapture,
mPreview);
} else {
CameraX.bindToLifecycle(mCurrentLifecycle, mImageCapture, mVideoCapture, mPreview);
mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector,
mImageCapture,
mVideoCapture, mPreview);
}
setZoomLevel(mZoomLevel);
setZoomRatio(UNITY_ZOOM_SCALE);
mCurrentLifecycle.getLifecycle().addObserver(mCurrentLifecycleObserver);
// Enable flash setting in ImageCapture after use cases are created and binded.
setFlash(getFlash());
@ -310,7 +297,7 @@ final class CameraXModule {
"Explicit open/close of camera not yet supported. Use bindtoLifecycle() instead.");
}
public void takePicture(Executor executor, ImageCapture.OnImageCapturedListener listener) {
public void takePicture(Executor executor, OnImageCapturedCallback callback) {
if (mImageCapture == null) {
return;
}
@ -319,35 +306,19 @@ final class CameraXModule {
throw new IllegalStateException("Can not take picture under VIDEO capture mode.");
}
if (listener == null) {
throw new IllegalArgumentException("OnImageCapturedListener should not be empty");
if (callback == null) {
throw new IllegalArgumentException("OnImageCapturedCallback should not be empty");
}
mImageCapture.takePicture(executor, listener);
}
public void takePicture(File saveLocation, Executor executor, ImageCapture.OnImageSavedListener listener) {
if (mImageCapture == null) {
return;
}
if (getCaptureMode() == CameraXView.CaptureMode.VIDEO) {
throw new IllegalStateException("Can not take picture under VIDEO capture mode.");
}
if (listener == null) {
throw new IllegalArgumentException("OnImageSavedListener should not be empty");
}
ImageCapture.Metadata metadata = new ImageCapture.Metadata();
metadata.isReversedHorizontal = mCameraLensFacing == CameraX.LensFacing.FRONT;
mImageCapture.takePicture(saveLocation, metadata, executor, listener);
mImageCapture.takePicture(executor, callback);
}
// Begin Signal Custom Code Block
@RequiresApi(26)
public void startRecording(FileDescriptor file, Executor executor, final VideoCapture.OnVideoSavedListener listener) {
// End Signal Custom Code Block
public void startRecording(FileDescriptor file,
// End Signal Custom Code Block
Executor executor,
final VideoCapture.OnVideoSavedCallback callback) {
if (mVideoCapture == null) {
return;
}
@ -356,31 +327,31 @@ final class CameraXModule {
throw new IllegalStateException("Can not record video under IMAGE capture mode.");
}
if (listener == null) {
throw new IllegalArgumentException("OnVideoSavedListener should not be empty");
if (callback == null) {
throw new IllegalArgumentException("OnVideoSavedCallback should not be empty");
}
mVideoIsRecording.set(true);
mVideoCapture.startRecording(
file,
executor,
new VideoCapture.OnVideoSavedListener() {
new VideoCapture.OnVideoSavedCallback() {
@Override
// Begin Signal Custom Code block
// Begin Signal Custom Code Block
public void onVideoSaved(@NonNull FileDescriptor savedFile) {
// End Signal Custom Code Block
mVideoIsRecording.set(false);
listener.onVideoSaved(savedFile);
callback.onVideoSaved(savedFile);
}
@Override
public void onError(
@NonNull VideoCapture.VideoCaptureError videoCaptureError,
@VideoCapture.VideoCaptureError int videoCaptureError,
@NonNull String message,
@Nullable Throwable cause) {
mVideoIsRecording.set(false);
Log.e(TAG, message, cause);
listener.onError(videoCaptureError, message, cause);
callback.onError(videoCaptureError, message, cause);
}
});
}
@ -402,9 +373,9 @@ final class CameraXModule {
// TODO(b/124269166): Rethink how we can handle permissions here.
@SuppressLint("MissingPermission")
public void setCameraLensFacing(@Nullable CameraX.LensFacing lensFacing) {
public void setCameraLensFacing(@Nullable Integer lensFacing) {
// Setting same lens facing is a no-op, so check for that first
if (mCameraLensFacing != lensFacing) {
if (!Objects.equals(mCameraLensFacing, lensFacing)) {
// If we're not bound to a lifecycle, just update the camera that will be opened when we
// attach to a lifecycle.
mCameraLensFacing = lensFacing;
@ -417,7 +388,7 @@ final class CameraXModule {
}
@RequiresPermission(permission.CAMERA)
public boolean hasCameraWithLensFacing(CameraX.LensFacing lensFacing) {
public boolean hasCameraWithLensFacing(@CameraSelector.LensFacing int lensFacing) {
String cameraId;
try {
cameraId = CameraX.getCameraWithLensFacing(lensFacing);
@ -429,14 +400,14 @@ final class CameraXModule {
}
@Nullable
public CameraX.LensFacing getLensFacing() {
public Integer getLensFacing() {
return mCameraLensFacing;
}
public void toggleCamera() {
// TODO(b/124269166): Rethink how we can handle permissions here.
@SuppressLint("MissingPermission")
Set<CameraX.LensFacing> availableCameraLensFacing = getAvailableCameraLensFacing();
Set<Integer> availableCameraLensFacing = getAvailableCameraLensFacing();
if (availableCameraLensFacing.isEmpty()) {
return;
@ -447,106 +418,65 @@ final class CameraXModule {
return;
}
if (mCameraLensFacing == CameraX.LensFacing.BACK
&& availableCameraLensFacing.contains(CameraX.LensFacing.FRONT)) {
setCameraLensFacing(CameraX.LensFacing.FRONT);
if (mCameraLensFacing == CameraSelector.LENS_FACING_BACK
&& availableCameraLensFacing.contains(CameraSelector.LENS_FACING_FRONT)) {
setCameraLensFacing(CameraSelector.LENS_FACING_FRONT);
return;
}
if (mCameraLensFacing == CameraX.LensFacing.FRONT
&& availableCameraLensFacing.contains(CameraX.LensFacing.BACK)) {
setCameraLensFacing(CameraX.LensFacing.BACK);
if (mCameraLensFacing == CameraSelector.LENS_FACING_FRONT
&& availableCameraLensFacing.contains(CameraSelector.LENS_FACING_BACK)) {
setCameraLensFacing(CameraSelector.LENS_FACING_BACK);
return;
}
}
public float getZoomLevel() {
return mZoomLevel;
public float getZoomRatio() {
if (mCamera != null) {
return mCamera.getCameraInfo().getZoomState().getValue().getZoomRatio();
} else {
return UNITY_ZOOM_SCALE;
}
}
public void setZoomLevel(float zoomLevel) {
// Set the zoom level in case it is set before binding to a lifecycle
this.mZoomLevel = zoomLevel;
public void setZoomRatio(float zoomRatio) {
if (mCamera != null) {
ListenableFuture<Void> future = mCamera.getCameraControl().setZoomRatio(
zoomRatio);
Futures.addCallback(future, new FutureCallback<Void>() {
@Override
public void onSuccess(@Nullable Void result) {
}
if (mPreview == null) {
// Nothing to zoom on yet since we don't have a preview. Defer calculating crop
// region.
return;
@Override
public void onFailure(Throwable t) {
// Throw the unexpected error.
throw new RuntimeException(t);
}
}, CameraXExecutors.directExecutor());
} else {
Log.e(TAG, "Failed to set zoom ratio");
}
Rect sensorSize;
try {
sensorSize = getSensorSize(getActiveCamera());
if (sensorSize == null) {
Log.e(TAG, "Failed to get the sensor size.");
return;
}
} catch (Exception e) {
Log.e(TAG, "Failed to get the sensor size.", e);
return;
}
float minZoom = getMinZoomLevel();
float maxZoom = getMaxZoomLevel();
if (this.mZoomLevel < minZoom) {
Log.e(TAG, "Requested zoom level is less than minimum zoom level.");
}
if (this.mZoomLevel > maxZoom) {
Log.e(TAG, "Requested zoom level is greater than maximum zoom level.");
}
this.mZoomLevel = Math.max(minZoom, Math.min(maxZoom, this.mZoomLevel));
float zoomScaleFactor =
(maxZoom == minZoom) ? minZoom : (this.mZoomLevel - minZoom) / (maxZoom - minZoom);
int minWidth = Math.round(sensorSize.width() / maxZoom);
int minHeight = Math.round(sensorSize.height() / maxZoom);
int diffWidth = sensorSize.width() - minWidth;
int diffHeight = sensorSize.height() - minHeight;
float cropWidth = diffWidth * zoomScaleFactor;
float cropHeight = diffHeight * zoomScaleFactor;
Rect cropRegion =
new Rect(
/*left=*/ (int) Math.ceil(cropWidth / 2 - 0.5f),
/*top=*/ (int) Math.ceil(cropHeight / 2 - 0.5f),
/*right=*/ (int) Math.floor(sensorSize.width() - cropWidth / 2 + 0.5f),
/*bottom=*/ (int) Math.floor(sensorSize.height() - cropHeight / 2 + 0.5f));
if (cropRegion.width() < 50 || cropRegion.height() < 50) {
Log.e(TAG, "Crop region is too small to compute 3A stats, so ignoring further zoom.");
return;
}
this.mCropRegion = cropRegion;
mPreview.zoom(cropRegion);
}
public float getMinZoomLevel() {
return UNITY_ZOOM_SCALE;
public float getMinZoomRatio() {
if (mCamera != null) {
return mCamera.getCameraInfo().getZoomState().getValue().getMinZoomRatio();
} else {
return UNITY_ZOOM_SCALE;
}
}
public float getMaxZoomLevel() {
try {
CameraCharacteristics characteristics =
mCameraManager.getCameraCharacteristics(getActiveCamera());
Float maxZoom =
characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if (maxZoom == null) {
return ZOOM_NOT_SUPPORTED;
}
if (maxZoom == ZOOM_NOT_SUPPORTED) {
return ZOOM_NOT_SUPPORTED;
}
return maxZoom;
} catch (Exception e) {
Log.e(TAG, "Failed to get SCALER_AVAILABLE_MAX_DIGITAL_ZOOM.", e);
public float getMaxZoomRatio() {
if (mCamera != null) {
return mCamera.getCameraInfo().getZoomState().getValue().getMaxZoomRatio();
} else {
return ZOOM_NOT_SUPPORTED;
}
return ZOOM_NOT_SUPPORTED;
}
public boolean isZoomSupported() {
return getMaxZoomLevel() != ZOOM_NOT_SUPPORTED;
return getMaxZoomRatio() != ZOOM_NOT_SUPPORTED;
}
// TODO(b/124269166): Rethink how we can handle permissions here.
@ -559,80 +489,47 @@ final class CameraXModule {
int getRelativeCameraOrientation(boolean compensateForMirroring) {
int rotationDegrees = 0;
try {
CameraInfo cameraInfo = CameraX.getCameraInfo(getLensFacing());
rotationDegrees = cameraInfo.getSensorRotationDegrees(getDisplaySurfaceRotation());
if (mCamera != null) {
rotationDegrees =
mCamera.getCameraInfo().getSensorRotationDegrees(getDisplaySurfaceRotation());
if (compensateForMirroring) {
rotationDegrees = (360 - rotationDegrees) % 360;
}
} catch (CameraInfoUnavailableException e) {
Log.e(TAG, "Failed to get CameraInfo", e);
} catch (Exception e) {
Log.e(TAG, "Failed to query camera", e);
}
return rotationDegrees;
}
public void invalidateView() {
transformPreview();
updateViewInfo();
}
void clearCurrentLifecycle() {
if (mCurrentLifecycle != null) {
if (mCurrentLifecycle != null && mCameraProvider != null) {
// Remove previous use cases
// Begin Signal Custom Code Block
CameraX.unbind(mImageCapture, mPreview);
if (mVideoCapture != null) {
CameraX.unbind(mVideoCapture);
List<UseCase> toUnbind = new ArrayList<>();
if (mImageCapture != null && mCameraProvider.isBound(mImageCapture)) {
toUnbind.add(mImageCapture);
}
if (mVideoCapture != null && mCameraProvider.isBound(mVideoCapture)) {
toUnbind.add(mVideoCapture);
}
if (mPreview != null && mCameraProvider.isBound(mPreview)) {
toUnbind.add(mPreview);
}
// End Signal Custom Code Block
}
if (!toUnbind.isEmpty()) {
mCameraProvider.unbind(toUnbind.toArray((new UseCase[0])));
}
}
mCamera = null;
mCurrentLifecycle = null;
}
private Rect getSensorSize(String cameraId) throws CameraAccessException {
CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
return characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
}
String getActiveCamera() throws CameraInfoUnavailableException {
return CameraX.getCameraWithLensFacing(mCameraLensFacing);
}
@UiThread
private void transformPreview() {
int previewWidth = getPreviewWidth();
int previewHeight = getPreviewHeight();
int displayOrientation = getDisplayRotationDegrees();
Matrix matrix = new Matrix();
// Apply rotation of the display
int rotation = -displayOrientation;
int px = (int) Math.round(previewWidth / 2d);
int py = (int) Math.round(previewHeight / 2d);
matrix.postRotate(rotation, px, py);
if (displayOrientation == 90 || displayOrientation == 270) {
// Swap width and height
float xScale = previewWidth / (float) previewHeight;
float yScale = previewHeight / (float) previewWidth;
matrix.postScale(xScale, yScale, px, py);
}
setTransform(matrix);
}
// Update view related information used in use cases
private void updateViewInfo() {
if (mImageCapture != null) {
mImageCapture.setTargetAspectRatioCustom(new Rational(getWidth(), getHeight()));
mImageCapture.setCropAspectRatio(new Rational(getWidth(), getHeight()));
mImageCapture.setTargetRotation(getDisplaySurfaceRotation());
}
@ -642,29 +539,46 @@ final class CameraXModule {
}
@RequiresPermission(permission.CAMERA)
private Set<CameraX.LensFacing> getAvailableCameraLensFacing() {
private Set<Integer> getAvailableCameraLensFacing() {
// Start with all camera directions
Set<CameraX.LensFacing> available = new LinkedHashSet<>(Arrays.asList(CameraX.LensFacing.values()));
Set<Integer> available = new LinkedHashSet<>(Arrays.asList(LensFacingConverter.values()));
// If we're bound to a lifecycle, remove unavailable cameras
if (mCurrentLifecycle != null) {
if (!hasCameraWithLensFacing(CameraX.LensFacing.BACK)) {
available.remove(CameraX.LensFacing.BACK);
if (!hasCameraWithLensFacing(CameraSelector.LENS_FACING_BACK)) {
available.remove(CameraSelector.LENS_FACING_BACK);
}
if (!hasCameraWithLensFacing(CameraX.LensFacing.FRONT)) {
available.remove(CameraX.LensFacing.FRONT);
if (!hasCameraWithLensFacing(CameraSelector.LENS_FACING_FRONT)) {
available.remove(CameraSelector.LENS_FACING_FRONT);
}
}
return available;
}
public FlashMode getFlash() {
@ImageCapture.FlashMode
public int getFlash() {
return mFlash;
}
public void setFlash(FlashMode flash) {
// Begin Signal Custom Code Block
public boolean hasFlash() {
if (mImageCapture == null) {
return false;
}
CameraInternal camera = mImageCapture.getBoundCamera();
if (camera == null) {
return false;
}
return camera.getCameraInfoInternal().hasFlashUnit();
}
// End Signal Custom Code Block
public void setFlash(@ImageCapture.FlashMode int flash) {
this.mFlash = flash;
if (mImageCapture == null) {
@ -676,101 +590,69 @@ final class CameraXModule {
}
public void enableTorch(boolean torch) {
if (mPreview == null) {
if (mCamera == null) {
return;
}
mPreview.enableTorch(torch);
ListenableFuture<Void> future = mCamera.getCameraControl().enableTorch(torch);
Futures.addCallback(future, new FutureCallback<Void>() {
@Override
public void onSuccess(@Nullable Void result) {
}
@Override
public void onFailure(Throwable t) {
// Throw the unexpected error.
throw new RuntimeException(t);
}
}, CameraXExecutors.directExecutor());
}
public boolean isTorchOn() {
if (mPreview == null) {
if (mCamera == null) {
return false;
}
return mPreview.isTorchOn();
return mCamera.getCameraInfo().getTorchState().getValue() == TorchState.ON;
}
public Context getContext() {
return mCameraView.getContext();
return mCameraXView.getContext();
}
public int getWidth() {
return mCameraView.getWidth();
return mCameraXView.getWidth();
}
public int getHeight() {
return mCameraView.getHeight();
return mCameraXView.getHeight();
}
public int getDisplayRotationDegrees() {
return CameraOrientationUtil.surfaceRotationToDegrees(getDisplaySurfaceRotation());
}
// Begin Signal Custom Code Block
public boolean hasFlash() {
try {
LiveData<Boolean> isFlashAvailable = CameraX.getCameraInfo(getLensFacing()).isFlashAvailable();
return isFlashAvailable.getValue() == Boolean.TRUE;
} catch (CameraInfoUnavailableException e) {
return false;
}
}
// End Signal Custom Code Block
protected int getDisplaySurfaceRotation() {
return mCameraView.getDisplaySurfaceRotation();
}
public void setSurfaceTexture(SurfaceTexture st) {
mCameraView.setSurfaceTexture(st);
}
private int getPreviewWidth() {
return mCameraView.getPreviewWidth();
}
private int getPreviewHeight() {
return mCameraView.getPreviewHeight();
return mCameraXView.getDisplaySurfaceRotation();
}
private int getMeasuredWidth() {
return mCameraView.getMeasuredWidth();
return mCameraXView.getMeasuredWidth();
}
private int getMeasuredHeight() {
return mCameraView.getMeasuredHeight();
return mCameraXView.getMeasuredHeight();
}
void setTransform(final Matrix matrix) {
if (Looper.myLooper() != Looper.getMainLooper()) {
mCameraView.post(
new Runnable() {
@Override
public void run() {
setTransform(matrix);
}
});
} else {
mCameraView.setTransform(matrix);
}
}
/**
* Notify the view that the source dimensions have changed.
*
* <p>This will allow the view to layout the preview to display the correct aspect ratio.
*
* @param width width of camera source buffers.
* @param height height of camera source buffers.
*/
void onPreviewSourceDimensUpdated(int width, int height) {
mCameraView.onPreviewSourceDimensUpdated(width, height);
@Nullable
public Camera getCamera() {
return mCamera;
}
@NonNull
public CameraXView.CaptureMode getCaptureMode() {
return mCaptureMode;
}
public void setCaptureMode(CameraXView.CaptureMode captureMode) {
public void setCaptureMode(@NonNull CameraXView.CaptureMode captureMode) {
this.mCaptureMode = captureMode;
rebindToLifecycle();
}

View File

@ -1,5 +1,6 @@
package org.thoughtcrime.securesms.mediasend.camerax;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
@ -19,14 +20,13 @@ import android.util.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.camera2.impl.compat.CameraManagerCompat;
import androidx.camera.core.CameraX;
import androidx.camera.camera2.internal.compat.CameraManagerCompat;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageProxy;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mediasend.LegacyCameraModels;
import org.thoughtcrime.securesms.migrations.LegacyMigrationJob;
import org.thoughtcrime.securesms.util.Stopwatch;
import java.io.ByteArrayOutputStream;
@ -57,11 +57,12 @@ public class CameraXUtil {
@SuppressWarnings("SuspiciousNameCombination")
@RequiresApi(21)
public static ImageResult toJpeg(@NonNull ImageProxy image, int rotation, boolean flip) throws IOException {
public static ImageResult toJpeg(@NonNull ImageProxy image, boolean flip) throws IOException {
ImageProxy.PlaneProxy[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
Rect cropRect = shouldCropImage(image) ? image.getCropRect() : null;
byte[] data = new byte[buffer.capacity()];
int rotation = image.getImageInfo().getRotationDegrees();
buffer.get(data);
@ -86,25 +87,25 @@ public class CameraXUtil {
return Build.VERSION.SDK_INT >= 21 && !LegacyCameraModels.isLegacyCameraModel();
}
public static int toCameraDirectionInt(@Nullable CameraX.LensFacing facing) {
if (facing == CameraX.LensFacing.FRONT) {
public static int toCameraDirectionInt(int facing) {
if (facing == CameraSelector.LENS_FACING_FRONT) {
return Camera.CameraInfo.CAMERA_FACING_FRONT;
} else {
return Camera.CameraInfo.CAMERA_FACING_BACK;
}
}
public static @NonNull CameraX.LensFacing toLensFacing(int cameraDirectionInt) {
public static int toLensFacing(@CameraSelector.LensFacing int cameraDirectionInt) {
if (cameraDirectionInt == Camera.CameraInfo.CAMERA_FACING_FRONT) {
return CameraX.LensFacing.FRONT;
return CameraSelector.LENS_FACING_FRONT;
} else {
return CameraX.LensFacing.BACK;
return CameraSelector.LENS_FACING_BACK;
}
}
public static @NonNull ImageCapture.CaptureMode getOptimalCaptureMode() {
return FastCameraModels.contains(Build.MODEL) ? ImageCapture.CaptureMode.MAX_QUALITY
: ImageCapture.CaptureMode.MIN_LATENCY;
public static @NonNull @ImageCapture.CaptureMode int getOptimalCaptureMode() {
return FastCameraModels.contains(Build.MODEL) ? ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
: ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY;
}
public static int getIdealResolution(int displayWidth, int displayHeight) {
@ -186,7 +187,7 @@ public class CameraXUtil {
@RequiresApi(21)
public static int getLowestSupportedHardwareLevel(@NonNull Context context) {
CameraManager cameraManager = CameraManagerCompat.from(context).unwrap();
@SuppressLint("RestrictedApi") CameraManager cameraManager = CameraManagerCompat.from(context).unwrap();
try {
int supported = maxHardwareLevel();

View File

@ -20,10 +20,6 @@ import android.Manifest.permission;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.display.DisplayManager;
import android.hardware.display.DisplayManager.DisplayListener;
import android.os.Bundle;
@ -33,17 +29,14 @@ import android.os.Parcelable;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.BaseInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@ -51,33 +44,42 @@ import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.annotation.RestrictTo;
import androidx.annotation.RestrictTo.Scope;
import androidx.annotation.UiThread;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraX;
import androidx.camera.core.FlashMode;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.DisplayOrientedMeteringPointFactory;
import androidx.camera.core.FocusMeteringAction;
import androidx.camera.core.FocusMeteringResult;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCapture.OnImageCapturedCallback;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.MeteringPoint;
import androidx.camera.core.impl.LensFacingConverter;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.impl.utils.futures.FutureCallback;
import androidx.camera.core.impl.utils.futures.Futures;
import androidx.lifecycle.LifecycleOwner;
import com.google.common.util.concurrent.ListenableFuture;
import org.thoughtcrime.securesms.R;
import java.io.File;
import java.io.FileDescriptor;
import java.util.concurrent.Executor;
/**
* A {@link View} that displays a preview of the camera with methods {@link
* #takePicture(Executor, OnImageCapturedListener)},
* {@link #takePicture(File, Executor, OnImageSavedListener)},
* {@link #startRecording(File, Executor, OnVideoSavedListener)} and {@link #stopRecording()}.
* #takePicture(Executor, OnImageCapturedCallback)},
* {@link #startRecording(FileDescriptor, Executor, VideoCapture.OnVideoSavedCallback)} and {@link #stopRecording()}.
*
* <p>Because the Camera is a limited resource and consumes a high amount of power, CameraView must
* be opened/closed. CameraView will handle opening/closing automatically through use of a {@link
* LifecycleOwner}. Use {@link #bindToLifecycle(LifecycleOwner)} to start the camera.
*/
// Begin Signal Custom Code Block
@RequiresApi(21)
public final class CameraXView extends ViewGroup {
@SuppressLint("RestrictedApi")
// End Signal Custom Code Block
public final class CameraXView extends FrameLayout {
static final String TAG = CameraXView.class.getSimpleName();
static final boolean DEBUG = false;
@ -85,7 +87,7 @@ public final class CameraXView extends ViewGroup {
static final int INDEFINITE_VIDEO_SIZE = -1;
private static final String EXTRA_SUPER = "super";
private static final String EXTRA_ZOOM_LEVEL = "zoom_level";
private static final String EXTRA_ZOOM_RATIO = "zoom_ratio";
private static final String EXTRA_PINCH_TO_ZOOM_ENABLED = "pinch_to_zoom_enabled";
private static final String EXTRA_FLASH = "flash";
private static final String EXTRA_MAX_VIDEO_DURATION = "max_video_duration";
@ -121,51 +123,31 @@ public final class CameraXView extends ViewGroup {
mCameraModule.invalidateView();
}
};
private TextureView mCameraTextureView;
private Size mPreviewSrcSize = new Size(0, 0);
private PreviewView mPreviewView;
private ScaleType mScaleType = ScaleType.CENTER_CROP;
// For accessibility event
private MotionEvent mUpEvent;
private @Nullable Paint mLayerPaint;
public CameraXView(Context context) {
public CameraXView(@NonNull Context context) {
this(context, null);
}
public CameraXView(Context context, AttributeSet attrs) {
public CameraXView(@NonNull Context context, @Nullable AttributeSet attrs) {
this(context, attrs, 0);
}
public CameraXView(Context context, AttributeSet attrs, int defStyle) {
public CameraXView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context, attrs);
}
@RequiresApi(21)
public CameraXView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
public CameraXView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr,
int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(context, attrs);
}
/** Debug logging that can be enabled. */
private static void log(String msg) {
if (DEBUG) {
Log.i(TAG, msg);
}
}
/** Utility method for converting an displayRotation int into a human readable string. */
private static String displayRotationToString(int displayRotation) {
if (displayRotation == Surface.ROTATION_0 || displayRotation == Surface.ROTATION_180) {
return "Portrait-" + (displayRotation * 90);
} else if (displayRotation == Surface.ROTATION_90
|| displayRotation == Surface.ROTATION_270) {
return "Landscape-" + (displayRotation * 90);
} else {
return "Unknown";
}
}
/**
* Binds control of the camera used by this view to the given lifecycle.
*
@ -184,21 +166,16 @@ public final class CameraXView extends ViewGroup {
* @throws IllegalStateException if camera permissions are not granted.
*/
@RequiresPermission(permission.CAMERA)
public void bindToLifecycle(LifecycleOwner lifecycleOwner) {
public void bindToLifecycle(@NonNull LifecycleOwner lifecycleOwner) {
mCameraModule.bindToLifecycle(lifecycleOwner);
}
private void init(Context context, @Nullable AttributeSet attrs) {
addView(mCameraTextureView = new TextureView(getContext()), 0 /* view position */);
mCameraTextureView.setLayerPaint(mLayerPaint);
addView(mPreviewView = new PreviewView(getContext()), 0 /* view position */);
mCameraModule = new CameraXModule(this);
if (isInEditMode()) {
onPreviewSourceDimensUpdated(640, 480);
}
if (attrs != null) {
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraView);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraXView);
setScaleType(
ScaleType.fromId(
a.getInteger(R.styleable.CameraXView_scaleType,
@ -217,10 +194,10 @@ public final class CameraXView extends ViewGroup {
setCameraLensFacing(null);
break;
case LENS_FACING_FRONT:
setCameraLensFacing(CameraX.LensFacing.FRONT);
setCameraLensFacing(CameraSelector.LENS_FACING_FRONT);
break;
case LENS_FACING_BACK:
setCameraLensFacing(CameraX.LensFacing.BACK);
setCameraLensFacing(CameraSelector.LENS_FACING_BACK);
break;
default:
// Unhandled event.
@ -229,13 +206,13 @@ public final class CameraXView extends ViewGroup {
int flashMode = a.getInt(R.styleable.CameraXView_flash, 0);
switch (flashMode) {
case FLASH_MODE_AUTO:
setFlash(FlashMode.AUTO);
setFlash(ImageCapture.FLASH_MODE_AUTO);
break;
case FLASH_MODE_ON:
setFlash(FlashMode.ON);
setFlash(ImageCapture.FLASH_MODE_ON);
break;
case FLASH_MODE_OFF:
setFlash(FlashMode.OFF);
setFlash(ImageCapture.FLASH_MODE_OFF);
break;
default:
// Unhandled event.
@ -252,12 +229,14 @@ public final class CameraXView extends ViewGroup {
}
@Override
@NonNull
protected LayoutParams generateDefaultLayoutParams() {
return new LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
}
@Override
@NonNull
protected Parcelable onSaveInstanceState() {
// TODO(b/113884082): Decide what belongs here or what should be invalidated on
// configuration
@ -265,20 +244,21 @@ public final class CameraXView extends ViewGroup {
Bundle state = new Bundle();
state.putParcelable(EXTRA_SUPER, super.onSaveInstanceState());
state.putInt(EXTRA_SCALE_TYPE, getScaleType().getId());
state.putFloat(EXTRA_ZOOM_LEVEL, getZoomLevel());
state.putFloat(EXTRA_ZOOM_RATIO, getZoomRatio());
state.putBoolean(EXTRA_PINCH_TO_ZOOM_ENABLED, isPinchToZoomEnabled());
state.putString(EXTRA_FLASH, getFlash().name());
state.putString(EXTRA_FLASH, FlashModeConverter.nameOf(getFlash()));
state.putLong(EXTRA_MAX_VIDEO_DURATION, getMaxVideoDuration());
state.putLong(EXTRA_MAX_VIDEO_SIZE, getMaxVideoSize());
if (getCameraLensFacing() != null) {
state.putString(EXTRA_CAMERA_DIRECTION, getCameraLensFacing().name());
state.putString(EXTRA_CAMERA_DIRECTION,
LensFacingConverter.nameOf(getCameraLensFacing()));
}
state.putInt(EXTRA_CAPTURE_MODE, getCaptureMode().getId());
return state;
}
@Override
protected void onRestoreInstanceState(Parcelable savedState) {
protected void onRestoreInstanceState(@Nullable Parcelable savedState) {
// TODO(b/113884082): Decide what belongs here or what should be invalidated on
// configuration
// change
@ -286,39 +266,22 @@ public final class CameraXView extends ViewGroup {
Bundle state = (Bundle) savedState;
super.onRestoreInstanceState(state.getParcelable(EXTRA_SUPER));
setScaleType(ScaleType.fromId(state.getInt(EXTRA_SCALE_TYPE)));
setZoomLevel(state.getFloat(EXTRA_ZOOM_LEVEL));
setZoomRatio(state.getFloat(EXTRA_ZOOM_RATIO));
setPinchToZoomEnabled(state.getBoolean(EXTRA_PINCH_TO_ZOOM_ENABLED));
setFlash(FlashMode.valueOf(state.getString(EXTRA_FLASH)));
setFlash(FlashModeConverter.valueOf(state.getString(EXTRA_FLASH)));
setMaxVideoDuration(state.getLong(EXTRA_MAX_VIDEO_DURATION));
setMaxVideoSize(state.getLong(EXTRA_MAX_VIDEO_SIZE));
String lensFacingString = state.getString(EXTRA_CAMERA_DIRECTION);
setCameraLensFacing(
TextUtils.isEmpty(lensFacingString)
? null
: CameraX.LensFacing.valueOf(lensFacingString));
: LensFacingConverter.valueOf(lensFacingString));
setCaptureMode(CaptureMode.fromId(state.getInt(EXTRA_CAPTURE_MODE)));
} else {
super.onRestoreInstanceState(savedState);
}
}
/**
* Sets the paint on the preview.
*
* <p>This only affects the preview, and does not affect captured images/video.
*
* @param paint The paint object to apply to the preview.
* @hide This may not work once {@link android.view.SurfaceView} is supported along with {@link
* TextureView}.
*/
@Override
@RestrictTo(Scope.LIBRARY_GROUP)
public void setLayerPaint(@Nullable Paint paint) {
super.setLayerPaint(paint);
mLayerPaint = paint;
mCameraTextureView.setLayerPaint(paint);
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
@ -335,33 +298,21 @@ public final class CameraXView extends ViewGroup {
dpyMgr.unregisterDisplayListener(mDisplayListener);
}
PreviewView getPreviewView() {
return mPreviewView;
}
// TODO(b/124269166): Rethink how we can handle permissions here.
@SuppressLint("MissingPermission")
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int viewWidth = MeasureSpec.getSize(widthMeasureSpec);
int viewHeight = MeasureSpec.getSize(heightMeasureSpec);
int displayRotation = getDisplay().getRotation();
if (mPreviewSrcSize.getHeight() == 0 || mPreviewSrcSize.getWidth() == 0) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
mCameraTextureView.measure(viewWidth, viewHeight);
} else {
Size scaled =
calculatePreviewViewDimens(
mPreviewSrcSize, viewWidth, viewHeight, displayRotation, mScaleType);
super.setMeasuredDimension(
Math.min(scaled.getWidth(), viewWidth),
Math.min(scaled.getHeight(), viewHeight));
mCameraTextureView.measure(scaled.getWidth(), scaled.getHeight());
}
// Since bindToLifecycle will depend on the measured dimension, only call it when measured
// dimension is not 0x0
if (getMeasuredWidth() > 0 && getMeasuredHeight() > 0) {
mCameraModule.bindToLifecycleAfterViewMeasured();
}
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
// TODO(b/124269166): Rethink how we can handle permissions here.
@ -372,114 +323,8 @@ public final class CameraXView extends ViewGroup {
// binding to lifecycle
mCameraModule.bindToLifecycleAfterViewMeasured();
// If we don't know the src buffer size yet, set the preview to be the parent size
if (mPreviewSrcSize.getWidth() == 0 || mPreviewSrcSize.getHeight() == 0) {
mCameraTextureView.layout(left, top, right, bottom);
return;
}
// Compute the preview ui size based on the available width, height, and ui orientation.
int viewWidth = (right - left);
int viewHeight = (bottom - top);
int displayRotation = getDisplay().getRotation();
Size scaled =
calculatePreviewViewDimens(
mPreviewSrcSize, viewWidth, viewHeight, displayRotation, mScaleType);
// Compute the center of the view.
int centerX = viewWidth / 2;
int centerY = viewHeight / 2;
// Compute the left / top / right / bottom values such that preview is centered.
int layoutL = centerX - (scaled.getWidth() / 2);
int layoutT = centerY - (scaled.getHeight() / 2);
int layoutR = layoutL + scaled.getWidth();
int layoutB = layoutT + scaled.getHeight();
// Layout debugging
log("layout: viewWidth: " + viewWidth);
log("layout: viewHeight: " + viewHeight);
log("layout: viewRatio: " + (viewWidth / (float) viewHeight));
log("layout: sizeWidth: " + mPreviewSrcSize.getWidth());
log("layout: sizeHeight: " + mPreviewSrcSize.getHeight());
log(
"layout: sizeRatio: "
+ (mPreviewSrcSize.getWidth() / (float) mPreviewSrcSize.getHeight()));
log("layout: scaledWidth: " + scaled.getWidth());
log("layout: scaledHeight: " + scaled.getHeight());
log("layout: scaledRatio: " + (scaled.getWidth() / (float) scaled.getHeight()));
log(
"layout: size: "
+ scaled
+ " ("
+ (scaled.getWidth() / (float) scaled.getHeight())
+ " - "
+ mScaleType
+ "-"
+ displayRotationToString(displayRotation)
+ ")");
log("layout: final " + layoutL + ", " + layoutT + ", " + layoutR + ", " + layoutB);
mCameraTextureView.layout(layoutL, layoutT, layoutR, layoutB);
mCameraModule.invalidateView();
}
/** Records the size of the preview's buffers. */
@UiThread
void onPreviewSourceDimensUpdated(int srcWidth, int srcHeight) {
if (srcWidth != mPreviewSrcSize.getWidth()
|| srcHeight != mPreviewSrcSize.getHeight()) {
mPreviewSrcSize = new Size(srcWidth, srcHeight);
requestLayout();
}
}
/**
 * Computes the size at which the preview should be laid out for the given source buffer
 * size, parent dimensions, display rotation and scale type.
 *
 * <p>For {@code CENTER_INSIDE} the longest sides are matched (letterbox); for
 * {@code CENTER_CROP} the shortest sides are matched (crop). If either source dimension
 * is zero, the parent dimensions are returned unchanged.
 *
 * @param srcSize         the preview buffer size (in sensor orientation).
 * @param parentWidth     available width of the parent view.
 * @param parentHeight    available height of the parent view.
 * @param displayRotation one of the {@link Surface} ROTATION_* constants.
 * @param scaleType       how the preview should be fit into the parent.
 * @return the width/height the preview view should occupy.
 */
private Size calculatePreviewViewDimens(
        Size srcSize,
        int parentWidth,
        int parentHeight,
        int displayRotation,
        ScaleType scaleType) {
    int inWidth = srcSize.getWidth();
    int inHeight = srcSize.getHeight();
    if (displayRotation == Surface.ROTATION_90 || displayRotation == Surface.ROTATION_270) {
        // Need to reverse the width and height since we're in landscape orientation.
        inWidth = srcSize.getHeight();
        inHeight = srcSize.getWidth();
    }

    int outWidth = parentWidth;
    int outHeight = parentHeight;
    if (inWidth != 0 && inHeight != 0) {
        float vfRatio = inWidth / (float) inHeight;
        float parentRatio = parentWidth / (float) parentHeight;

        switch (scaleType) {
            case CENTER_INSIDE:
                // Match longest sides together.
                if (vfRatio > parentRatio) {
                    outWidth = parentWidth;
                    outHeight = Math.round(parentWidth / vfRatio);
                } else {
                    outWidth = Math.round(parentHeight * vfRatio);
                    outHeight = parentHeight;
                }
                break;
            case CENTER_CROP:
                // Match shortest sides together.
                if (vfRatio < parentRatio) {
                    outWidth = parentWidth;
                    outHeight = Math.round(parentWidth / vfRatio);
                } else {
                    outWidth = Math.round(parentHeight * vfRatio);
                    outHeight = parentHeight;
                }
                break;
        }
    }

    // NOTE: removed a stray, unreachable `super.onLayout(...)` call that sat after the
    // return statement (leftover from a merge); it referenced undeclared locals and
    // would not compile.
    return new Size(outWidth, outHeight);
}
/**
@ -499,58 +344,12 @@ public final class CameraXView extends ViewGroup {
return display.getRotation();
}
/**
 * Returns the {@link SurfaceTexture} backing the preview, or {@code null} when the
 * texture view has not been created yet.
 */
@UiThread
SurfaceTexture getSurfaceTexture() {
    return (mCameraTextureView == null) ? null : mCameraTextureView.getSurfaceTexture();
}
/**
 * Attaches {@code surfaceTexture} to the preview's {@link TextureView}, replacing the
 * TextureView itself first when it already holds a live texture.
 */
@UiThread
void setSurfaceTexture(SurfaceTexture surfaceTexture) {
    if (mCameraTextureView.getSurfaceTexture() == surfaceTexture) {
        // Already showing this texture; nothing to do.
        return;
    }
    if (mCameraTextureView.isAvailable()) {
        // Remove the old TextureView to properly detach the old SurfaceTexture from the GL
        // Context.
        removeView(mCameraTextureView);
        mCameraTextureView = new TextureView(getContext());
        addView(mCameraTextureView, 0);
        mCameraTextureView.setLayerPaint(mLayerPaint);
        requestLayout();
    }
    mCameraTextureView.setSurfaceTexture(surfaceTexture);
}
/** Copies the preview {@link TextureView}'s transform into {@code matrix} and returns it. */
@UiThread
Matrix getTransform(Matrix matrix) {
    return mCameraTextureView.getTransform(matrix);
}
/** Returns the current laid-out width of the preview {@link TextureView}, in pixels. */
@UiThread
int getPreviewWidth() {
    return mCameraTextureView.getWidth();
}
/** Returns the current laid-out height of the preview {@link TextureView}, in pixels. */
@UiThread
int getPreviewHeight() {
    return mCameraTextureView.getHeight();
}
/** Applies {@code matrix} to the preview {@link TextureView}, if one exists yet. */
@UiThread
void setTransform(final Matrix matrix) {
    TextureView textureView = mCameraTextureView;
    if (textureView != null) {
        textureView.setTransform(matrix);
    }
}
/**
 * Returns the scale type used to scale the preview.
 *
 * @return The current {@link ScaleType}.
 * @see #setScaleType(ScaleType)
 */
@NonNull
public ScaleType getScaleType() {
    return mScaleType;
}
@ -562,7 +361,7 @@ public final class CameraXView extends ViewGroup {
*
* @param scaleType The desired {@link ScaleType}.
*/
public void setScaleType(ScaleType scaleType) {
public void setScaleType(@NonNull ScaleType scaleType) {
if (scaleType != mScaleType) {
mScaleType = scaleType;
requestLayout();
@ -574,6 +373,7 @@ public final class CameraXView extends ViewGroup {
*
* @return The current {@link CaptureMode}.
*/
/** Returns the active {@link CaptureMode}, as reported by the camera module. */
@NonNull
public CaptureMode getCaptureMode() {
    return mCameraModule.getCaptureMode();
}
@ -585,7 +385,7 @@ public final class CameraXView extends ViewGroup {
*
* @param captureMode The desired {@link CaptureMode}.
*/
public void setCaptureMode(CaptureMode captureMode) {
public void setCaptureMode(@NonNull CaptureMode captureMode) {
mCameraModule.setCaptureMode(captureMode);
}
@ -601,7 +401,7 @@ public final class CameraXView extends ViewGroup {
}
/**
* Sets the maximum video duration before {@link VideoCapture.OnVideoSavedCallback#onVideoSaved(FileDescriptor)} is
* called automatically. Use {@link #INDEFINITE_VIDEO_DURATION} to disable the timeout.
*/
private void setMaxVideoDuration(long duration) {
@ -617,7 +417,7 @@ public final class CameraXView extends ViewGroup {
}
/**
* Sets the maximum video size in bytes before {@link VideoCapture.OnVideoSavedCallback#onVideoSaved(FileDescriptor)}
* is called automatically. Use {@link #INDEFINITE_VIDEO_SIZE} to disable the size restriction.
*/
private void setMaxVideoSize(long size) {
@ -625,44 +425,32 @@ public final class CameraXView extends ViewGroup {
}
/**
* Takes a picture, and calls {@link OnImageCapturedCallback#onCaptureSuccess(ImageProxy)}
* once when done.
*
* @param executor The executor in which the callback methods will be run.
* @param callback Callback which will receive success or failure callbacks.
*/
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
public void takePicture(@NonNull Executor executor, @NonNull ImageCapture.OnImageCapturedListener listener) {
mCameraModule.takePicture(executor, listener);
public void takePicture(@NonNull Executor executor, @NonNull OnImageCapturedCallback callback) {
mCameraModule.takePicture(executor, callback);
}
/**
* Takes a picture and calls {@link OnImageSavedListener#onImageSaved(File)} when done.
* Takes a video and calls the OnVideoSavedCallback when done.
*
* @param file The destination.
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener which will receive success or failure callbacks.
*/
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
public void takePicture(@NonNull File file, @NonNull Executor executor,
@NonNull ImageCapture.OnImageSavedListener listener) {
mCameraModule.takePicture(file, executor, listener);
}
/**
* Takes a video and calls the OnVideoSavedListener when done.
*
* @param file The destination.
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener which will receive success or failure callbacks.
* @param executor The executor in which the callback methods will be run.
* @param callback Callback which will receive success or failure.
*/
// Begin Signal Custom Code Block
@RequiresApi(26)
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
public void startRecording(@NonNull FileDescriptor file, @NonNull Executor executor,
// End Signal Custom Code Block
@NonNull VideoCapture.OnVideoSavedListener listener) {
mCameraModule.startRecording(file, executor, listener);
public void startRecording(// Begin Signal Custom Code Block
@NonNull FileDescriptor file,
// End Signal Custom Code Block
@NonNull Executor executor,
@NonNull VideoCapture.OnVideoSavedCallback callback) {
mCameraModule.startRecording(file, executor, callback);
}
/** Stops an in progress video. */
@ -685,7 +473,7 @@ public final class CameraXView extends ViewGroup {
* @throws IllegalStateException if the CAMERA permission is not currently granted.
*/
@RequiresPermission(permission.CAMERA)
public boolean hasCameraWithLensFacing(CameraX.LensFacing lensFacing) {
public boolean hasCameraWithLensFacing(@CameraSelector.LensFacing int lensFacing) {
return mCameraModule.hasCameraWithLensFacing(lensFacing);
}
@ -706,7 +494,7 @@ public final class CameraXView extends ViewGroup {
*
* <p>If called before {@link #bindToLifecycle(LifecycleOwner)}, this will set the camera to be
* used when first bound to the lifecycle. If the specified lensFacing is not supported by the
* device, as determined by {@link #hasCameraWithLensFacing(int)}, the first supported
* lensFacing will be chosen when {@link #bindToLifecycle(LifecycleOwner)} is called.
*
* <p>If called with {@code null} AFTER binding to the lifecycle, the behavior would be
@ -714,36 +502,33 @@ public final class CameraXView extends ViewGroup {
*
* @param lensFacing The desired camera lensFacing.
*/
public void setCameraLensFacing(@Nullable CameraX.LensFacing lensFacing) {
public void setCameraLensFacing(@Nullable Integer lensFacing) {
mCameraModule.setCameraLensFacing(lensFacing);
}
/** Returns the currently selected {@link LensFacing}. */
/** Returns the currently selected lensFacing. */
@Nullable
public CameraX.LensFacing getCameraLensFacing() {
public Integer getCameraLensFacing() {
return mCameraModule.getLensFacing();
}
/** Gets the active flash strategy as one of the {@link ImageCapture.FlashMode} int constants. */
@ImageCapture.FlashMode
public int getFlash() {
    return mCameraModule.getFlash();
}
// Begin Signal Custom Code Block
/** Whether a flash is available, as reported by the camera module. */
public boolean hasFlash() {
    return mCameraModule.hasFlash();
}
// End Signal Custom Code Block
/** Gets the active flash strategy. */
public FlashMode getFlash() {
return mCameraModule.getFlash();
}
/** Sets the active flash strategy. */
public void setFlash(@NonNull FlashMode flashMode) {
public void setFlash(@ImageCapture.FlashMode int flashMode) {
mCameraModule.setFlash(flashMode);
}
/**
 * Returns the camera orientation relative to this view, delegated to the camera module.
 * NOTE(review): presumably compensateForMirroring accounts for front-camera mirroring —
 * confirm against CameraXModule.
 */
private int getRelativeCameraOrientation(boolean compensateForMirroring) {
    return mCameraModule.getRelativeCameraOrientation(compensateForMirroring);
}
/** Milliseconds elapsed since {@code mDownEventTimestamp} was recorded. */
private long delta() {
    return System.currentTimeMillis() - mDownEventTimestamp;
}
@ -793,42 +578,47 @@ public final class CameraXView extends ViewGroup {
final float y = (mUpEvent != null) ? mUpEvent.getY() : getY() + getHeight() / 2f;
mUpEvent = null;
TextureViewMeteringPointFactory pointFactory = new TextureViewMeteringPointFactory(
mCameraTextureView);
CameraSelector cameraSelector =
new CameraSelector.Builder().requireLensFacing(
mCameraModule.getLensFacing()).build();
DisplayOrientedMeteringPointFactory pointFactory = new DisplayOrientedMeteringPointFactory(
getDisplay(), cameraSelector, mPreviewView.getWidth(), mPreviewView.getHeight());
float afPointWidth = 1.0f / 6.0f; // 1/6 total area
float aePointWidth = afPointWidth * 1.5f;
MeteringPoint afPoint = pointFactory.createPoint(x, y, afPointWidth, 1.0f);
MeteringPoint aePoint = pointFactory.createPoint(x, y, aePointWidth, 1.0f);
MeteringPoint afPoint = pointFactory.createPoint(x, y, afPointWidth);
MeteringPoint aePoint = pointFactory.createPoint(x, y, aePointWidth);
try {
CameraX.getCameraControl(getCameraLensFacing()).startFocusAndMetering(
FocusMeteringAction.Builder.from(afPoint, FocusMeteringAction.MeteringMode.AF_ONLY)
.addPoint(aePoint, FocusMeteringAction.MeteringMode.AE_ONLY)
.build());
} catch (CameraInfoUnavailableException e) {
Log.d(TAG, "cannot access camera", e);
Camera camera = mCameraModule.getCamera();
if (camera != null) {
ListenableFuture<FocusMeteringResult> future =
camera.getCameraControl().startFocusAndMetering(
new FocusMeteringAction.Builder(afPoint,
FocusMeteringAction.FLAG_AF).addPoint(aePoint,
FocusMeteringAction.FLAG_AE).build());
Futures.addCallback(future, new FutureCallback<FocusMeteringResult>() {
@Override
public void onSuccess(@Nullable FocusMeteringResult result) {
}
@Override
public void onFailure(Throwable t) {
// Throw the unexpected error.
throw new RuntimeException(t);
}
}, CameraXExecutors.directExecutor());
} else {
Log.d(TAG, "cannot access camera");
}
return true;
}
/** Returns the width * height of the given rect. */
private int area(Rect rect) {
    return rect.width() * rect.height();
}
/** Clamps {@code val} to the closed range defined by {@code min} and {@code max}. */
private int rangeLimit(int val, int max, int min) {
    int atLeastMin = Math.max(val, min);
    return Math.min(atLeastMin, max);
}
/** Clamps {@code val} to the closed range defined by {@code min} and {@code max}. */
float rangeLimit(float val, float max, float min) {
    float atLeastMin = Math.max(val, min);
    return Math.min(atLeastMin, max);
}
/** Returns the absolute difference between {@code a} and {@code b}. */
private int distance(int a, int b) {
    int difference = a - b;
    return Math.abs(difference);
}
/**
* Returns whether the view allows pinch-to-zoom.
*
@ -851,47 +641,47 @@ public final class CameraXView extends ViewGroup {
}
/**
* Returns the current zoom ratio.
*
* @return The current zoom ratio.
*/
public float getZoomLevel() {
return mCameraModule.getZoomLevel();
public float getZoomRatio() {
return mCameraModule.getZoomRatio();
}
/**
* Sets the current zoom ratio.
*
* <p>Valid zoom values range from {@link #getMinZoomRatio()} to {@link #getMaxZoomRatio()}.
*
* @param zoomRatio The requested zoom ratio.
*/
public void setZoomLevel(float zoomLevel) {
mCameraModule.setZoomLevel(zoomLevel);
public void setZoomRatio(float zoomRatio) {
mCameraModule.setZoomRatio(zoomRatio);
}
/**
* Returns the minimum zoom ratio.
*
* <p>For most cameras this should return a zoom ratio of 1. A zoom ratio of 1 corresponds to a
* non-zoomed image.
*
* @return The minimum zoom ratio.
*/
public float getMinZoomLevel() {
return mCameraModule.getMinZoomLevel();
public float getMinZoomRatio() {
return mCameraModule.getMinZoomRatio();
}
/**
* Returns the maximum zoom ratio.
*
* <p>The zoom ratio corresponds to the ratio between both the widths and heights of a
* non-zoomed image and a maximally zoomed image for the selected camera.
*
* @return The maximum zoom ratio.
*/
public float getMaxZoomLevel() {
return mCameraModule.getMaxZoomLevel();
public float getMaxZoomRatio() {
return mCameraModule.getMaxZoomRatio();
}
/**
@ -935,7 +725,7 @@ public final class CameraXView extends ViewGroup {
*/
CENTER_INSIDE(1);
private int mId;
private final int mId;
int getId() {
return mId;
@ -959,7 +749,7 @@ public final class CameraXView extends ViewGroup {
* The capture mode used by CameraView.
*
* <p>This enum can be used to determine which capture mode will be enabled for {@link
* CameraXView}.
*/
public enum CaptureMode {
/** A mode where image capture is enabled. */
@ -972,7 +762,7 @@ public final class CameraXView extends ViewGroup {
*/
MIXED(2);
private int mId;
private final int mId;
int getId() {
return mId;
@ -1007,10 +797,6 @@ public final class CameraXView extends ViewGroup {
private class PinchToZoomGestureDetector extends ScaleGestureDetector
implements ScaleGestureDetector.OnScaleGestureListener {
private static final float SCALE_MULTIPIER = 0.75f;
private final BaseInterpolator mInterpolator = new DecelerateInterpolator(2f);
private float mNormalizedScaleFactor = 0;
PinchToZoomGestureDetector(Context context) {
this(context, new S());
}
@ -1022,34 +808,23 @@ public final class CameraXView extends ViewGroup {
@Override
public boolean onScale(ScaleGestureDetector detector) {
    // Removed the interleaved pre-beta01 normalized-zoom implementation (merge
    // artifact) that referenced the deleted mNormalizedScaleFactor/mInterpolator
    // fields; only the beta01 ratio-based implementation is kept.
    float scale = detector.getScaleFactor();

    // Speeding up the zoom by 2X.
    if (scale > 1f) {
        scale = 1.0f + (scale - 1.0f) * 2;
    } else {
        scale = 1.0f - (1.0f - scale) * 2;
    }

    // Clamp the new ratio to the camera's supported zoom range before applying it.
    float newRatio = getZoomRatio() * scale;
    newRatio = rangeLimit(newRatio, getMaxZoomRatio(), getMinZoomRatio());
    setZoomRatio(newRatio);
    return true;
}
@Override
public boolean onScaleBegin(ScaleGestureDetector detector) {
    // Removed the stale pre-beta01 body (merge artifact) that seeded
    // mNormalizedScaleFactor from the deleted zoom-level API; the beta01 ratio-based
    // onScale() needs no per-gesture initialization.
    return true;
}

View File

@ -0,0 +1,78 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import static androidx.camera.core.ImageCapture.FLASH_MODE_AUTO;
import static androidx.camera.core.ImageCapture.FLASH_MODE_OFF;
import static androidx.camera.core.ImageCapture.FLASH_MODE_ON;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.ImageCapture.FlashMode;
/**
 * Helper class that defines certain enum-like methods for {@link FlashMode}
 */
final class FlashModeConverter {

    private FlashModeConverter() {
    }

    /**
     * Returns the {@link FlashMode} constant for the specified name
     *
     * @param name The name of the {@link FlashMode} to return
     * @return The {@link FlashMode} constant for the specified name
     */
    @FlashMode
    public static int valueOf(@Nullable final String name) {
        if (name == null) {
            throw new NullPointerException("name cannot be null");
        }
        if ("AUTO".equals(name)) {
            return FLASH_MODE_AUTO;
        }
        if ("ON".equals(name)) {
            return FLASH_MODE_ON;
        }
        if ("OFF".equals(name)) {
            return FLASH_MODE_OFF;
        }
        throw new IllegalArgumentException("Unknown flash mode name " + name);
    }

    /**
     * Returns the name of the {@link FlashMode} constant, exactly as it is declared.
     *
     * @param flashMode A {@link FlashMode} constant
     * @return The name of the {@link FlashMode} constant.
     */
    @NonNull
    public static String nameOf(@FlashMode final int flashMode) {
        if (flashMode == FLASH_MODE_AUTO) {
            return "AUTO";
        }
        if (flashMode == FLASH_MODE_ON) {
            return "ON";
        }
        if (flashMode == FLASH_MODE_OFF) {
            return "OFF";
        }
        throw new IllegalArgumentException("Unknown flash mode " + flashMode);
    }
}

View File

@ -0,0 +1,273 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.content.Context;
import android.content.res.TypedArray;
import android.hardware.display.DisplayManager;
import android.os.Build;
import android.util.AttributeSet;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.core.Preview;
import org.thoughtcrime.securesms.R;
import java.util.concurrent.Executor;
/**
 * Custom View that displays camera feed for CameraX's Preview use case.
 *
 * <p> This class manages the Surface lifecycle, as well as the preview aspect ratio and
 * orientation. Internally, it uses either a {@link android.view.TextureView} or
 * {@link android.view.SurfaceView} to display the camera feed.
 */
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
public class PreviewView extends FrameLayout {

    // Package-visible so the anonymous DisplayListener below can reach it without a
    // synthetic accessor warning.
    @SuppressWarnings("WeakerAccess") /* synthetic accessor */
    Implementation mImplementation;

    private ImplementationMode mImplementationMode;

    // Forwards display changes (e.g. rotation) to the active implementation so it can
    // re-apply its transform; add/remove events are intentionally ignored.
    private final DisplayManager.DisplayListener mDisplayListener =
            new DisplayManager.DisplayListener() {
                @Override
                public void onDisplayAdded(int displayId) {
                }

                @Override
                public void onDisplayRemoved(int displayId) {
                }

                @Override
                public void onDisplayChanged(int displayId) {
                    mImplementation.onDisplayChanged();
                }
            };

    public PreviewView(@NonNull Context context) {
        this(context, null);
    }

    public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        this(context, attrs, defStyleAttr, 0);
    }

    public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr,
            int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);

        // Read the implementation mode from the XML attribute, defaulting to TEXTURE_VIEW.
        final TypedArray attributes = context.getTheme().obtainStyledAttributes(attrs,
                R.styleable.PreviewView, defStyleAttr, defStyleRes);
        try {
            final int implementationModeId = attributes.getInteger(
                    R.styleable.PreviewView_implementationMode,
                    ImplementationMode.TEXTURE_VIEW.getId());
            mImplementationMode = ImplementationMode.fromId(implementationModeId);
        } finally {
            attributes.recycle();
        }
        setUp();
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Listen for display changes only while attached to a window.
        final DisplayManager displayManager =
                (DisplayManager) getContext().getSystemService(Context.DISPLAY_SERVICE);
        if (displayManager != null) {
            displayManager.registerDisplayListener(mDisplayListener, getHandler());
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Unregister so the DisplayManager does not keep a reference to this view.
        final DisplayManager displayManager =
                (DisplayManager) getContext().getSystemService(Context.DISPLAY_SERVICE);
        if (displayManager != null) {
            displayManager.unregisterDisplayListener(mDisplayListener);
        }
    }

    // (Re)creates the child view hierarchy for the current implementation mode.
    private void setUp() {
        removeAllViews();
        switch (mImplementationMode) {
            case SURFACE_VIEW:
                mImplementation = new SurfaceViewImplementation();
                break;
            case TEXTURE_VIEW:
                mImplementation = new TextureViewImplementation();
                break;
            default:
                throw new IllegalStateException(
                        "Unsupported implementation mode " + mImplementationMode);
        }
        mImplementation.init(this);
    }

    /**
     * Specifies the {@link ImplementationMode} to use for the preview.
     *
     * @param implementationMode <code>SURFACE_VIEW</code> if a {@link android.view.SurfaceView}
     *                           should be used to display the camera feed, or
     *                           <code>TEXTURE_VIEW</code> to use a {@link android.view.TextureView}
     */
    public void setImplementationMode(@NonNull final ImplementationMode implementationMode) {
        mImplementationMode = implementationMode;
        setUp();
    }

    /**
     * Returns the implementation mode of the {@link PreviewView}.
     *
     * @return <code>SURFACE_VIEW</code> if the {@link PreviewView} is internally using a
     * {@link android.view.SurfaceView} to display the camera feed, or <code>TEXTURE_VIEW</code>
     * if a {@link android.view.TextureView} is being used.
     */
    @NonNull
    public ImplementationMode getImplementationMode() {
        return mImplementationMode;
    }

    /**
     * Gets the {@link Preview.SurfaceProvider} to be used with
     * {@link Preview#setSurfaceProvider(Executor, Preview.SurfaceProvider)}.
     */
    @NonNull
    public Preview.SurfaceProvider getPreviewSurfaceProvider() {
        return mImplementation.getSurfaceProvider();
    }

    /**
     * Implements this interface to create PreviewView implementation.
     */
    interface Implementation {

        /**
         * Initializes the parent view with sub views.
         *
         * @param parent the containing parent {@link FrameLayout}.
         */
        void init(@NonNull FrameLayout parent);

        /**
         * Gets the {@link Preview.SurfaceProvider} to be used with {@link Preview}.
         */
        @NonNull
        Preview.SurfaceProvider getSurfaceProvider();

        /**
         * Notifies that the display properties have changed.
         *
         * <p>Implementation might need to adjust transform by latest display properties such as
         * display orientation in order to show the preview correctly.
         */
        void onDisplayChanged();
    }

    /**
     * The implementation mode of a {@link PreviewView}
     *
     * <p>Specifies how the Preview surface will be implemented internally: Using a
     * {@link android.view.SurfaceView} or a {@link android.view.TextureView} (which is the default)
     * </p>
     */
    public enum ImplementationMode {
        /** Use a {@link android.view.SurfaceView} for the preview */
        SURFACE_VIEW(0),

        /** Use a {@link android.view.TextureView} for the preview */
        TEXTURE_VIEW(1);

        private final int mId;

        ImplementationMode(final int id) {
            mId = id;
        }

        public int getId() {
            return mId;
        }

        // Maps an XML attribute value back to its enum constant.
        static ImplementationMode fromId(final int id) {
            for (final ImplementationMode mode : values()) {
                if (mode.mId == id) {
                    return mode;
                }
            }
            throw new IllegalArgumentException("Unsupported implementation mode " + id);
        }
    }

    /** Options for scaling the preview vis-à-vis its container {@link PreviewView}. */
    public enum ScaleType {
        /**
         * Scale the preview, maintaining the source aspect ratio, so it fills the entire
         * {@link PreviewView}, and align it to the top left corner of the view.
         * This may cause the preview to be cropped if the camera preview aspect ratio does not
         * match that of its container {@link PreviewView}.
         */
        FILL_START,
        /**
         * Scale the preview, maintaining the source aspect ratio, so it fills the entire
         * {@link PreviewView}, and center it inside the view.
         * This may cause the preview to be cropped if the camera preview aspect ratio does not
         * match that of its container {@link PreviewView}.
         */
        FILL_CENTER,
        /**
         * Scale the preview, maintaining the source aspect ratio, so it fills the entire
         * {@link PreviewView}, and align it to the bottom right corner of the view.
         * This may cause the preview to be cropped if the camera preview aspect ratio does not
         * match that of its container {@link PreviewView}.
         */
        FILL_END,
        /**
         * Scale the preview, maintaining the source aspect ratio, so it is entirely contained
         * within the {@link PreviewView}, and align it to the top left corner of the view.
         * Both dimensions of the preview will be equal or less than the corresponding dimensions
         * of its container {@link PreviewView}.
         */
        FIT_START,
        /**
         * Scale the preview, maintaining the source aspect ratio, so it is entirely contained
         * within the {@link PreviewView}, and center it inside the view.
         * Both dimensions of the preview will be equal or less than the corresponding dimensions
         * of its container {@link PreviewView}.
         */
        FIT_CENTER,
        /**
         * Scale the preview, maintaining the source aspect ratio, so it is entirely contained
         * within the {@link PreviewView}, and align it to the bottom right corner of the view.
         * Both dimensions of the preview will be equal or less than the corresponding dimensions
         * of its container {@link PreviewView}.
         */
        FIT_END
    }
}

View File

@ -0,0 +1,162 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.content.Context;
import android.graphics.Point;
import android.util.Pair;
import android.util.Size;
import android.view.Display;
import android.view.View;
import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
final class ScaleTypeTransform {

    /**
     * Computes the scale by which a view has to scale in x and y in order to fill its parent
     * while maintaining the buffer's aspect ratio.
     *
     * @param container A parent {@link android.view.View} that wraps {@code view}.
     * @param view A child {@link android.view.View} of {@code container}.
     * @param bufferSize A {@link android.util.Size} whose aspect ratio must be maintained when
     *                   scaling {@code view} inside its parent {@code container}.
     * @return The scale by which {@code view} has to scale in x and y in order to fill its
     * parent while maintaining {@code bufferSize}'s aspect ratio.
     */
    @SuppressWarnings("SuspiciousNameCombination")
    static Pair<Float, Float> getFillScaleWithBufferAspectRatio(@NonNull final View container,
            @NonNull final View view, @NonNull final Size bufferSize) {
        // Scaling only makes sense when none of the dimensions are equal to zero. In the
        // opposite case, a default scale of 1 is returned.
        if (container.getWidth() == 0 || container.getHeight() == 0 || view.getWidth() == 0
                || view.getHeight() == 0 || bufferSize.getWidth() == 0
                || bufferSize.getHeight() == 0) {
            return new Pair<>(1F, 1F);
        }

        final int viewRotationDegrees = getRotationDegrees(view);
        final boolean isNaturalPortrait = isNaturalPortrait(view.getContext(), viewRotationDegrees);

        // Swap the buffer's dimensions on naturally portrait devices before comparing
        // them against the view's.
        final int bufferWidth;
        final int bufferHeight;
        if (isNaturalPortrait) {
            bufferWidth = bufferSize.getHeight();
            bufferHeight = bufferSize.getWidth();
        } else {
            bufferWidth = bufferSize.getWidth();
            bufferHeight = bufferSize.getHeight();
        }

        // Scale the buffers back to the original output size.
        float scaleX = bufferWidth / (float) view.getWidth();
        float scaleY = bufferHeight / (float) view.getHeight();

        // Swap again when the view is rotated 90/270 degrees from its natural orientation.
        int bufferRotatedWidth;
        int bufferRotatedHeight;
        if (viewRotationDegrees == 0 || viewRotationDegrees == 180) {
            bufferRotatedWidth = bufferWidth;
            bufferRotatedHeight = bufferHeight;
        } else {
            bufferRotatedWidth = bufferHeight;
            bufferRotatedHeight = bufferWidth;
        }

        // Scale the buffer so that it completely fills the container.
        final float scale = Math.max(container.getWidth() / (float) bufferRotatedWidth,
                container.getHeight() / (float) bufferRotatedHeight);
        scaleX *= scale;
        scaleY *= scale;

        return new Pair<>(scaleX, scaleY);
    }

    /**
     * Computes the top left coordinates for the view to be centered inside its parent.
     *
     * @param container A parent {@link android.view.View} that wraps {@code view}.
     * @param view A child {@link android.view.View} of {@code container}.
     * @return A {@link android.graphics.Point} whose coordinates represent the top left of
     * {@code view} when centered inside its parent.
     */
    static Point getOriginOfCenteredView(@NonNull final View container,
            @NonNull final View view) {
        final int offsetX = (view.getWidth() - container.getWidth()) / 2;
        final int offsetY = (view.getHeight() - container.getHeight()) / 2;
        return new Point(-offsetX, -offsetY);
    }

    /**
     * Computes the rotation of a {@link android.view.View} in degrees from its natural
     * orientation.
     */
    static int getRotationDegrees(@NonNull final View view) {
        final WindowManager windowManager = (WindowManager) view.getContext().getSystemService(
                Context.WINDOW_SERVICE);
        if (windowManager == null) {
            // No window manager available; treat the view as un-rotated.
            return 0;
        }
        final int rotation = windowManager.getDefaultDisplay().getRotation();
        return SurfaceRotation.rotationDegreesFromSurfaceRotation(rotation);
    }

    /**
     * Determines whether the current device is a natural portrait-oriented device
     *
     * <p>
     * Using the current app's window to determine whether the device is a natural
     * portrait-oriented device doesn't work in all scenarios, one example of this is multi-window
     * mode.
     * Taking a natural portrait-oriented device in multi-window mode, rotating it 90 degrees (so
     * that it's in landscape), with the app open, and its window's width being smaller than its
     * height. Using the app's width and height would determine that the device isn't
     * naturally portrait-oriented, where in fact it is, which is why it is important to use the
     * size of the device instead.
     * </p>
     *
     * @param context Current context. Can be an {@link android.app.Application} context
     *                or an {@link android.app.Activity} context.
     * @param rotationDegrees The device's rotation in degrees from its natural orientation.
     * @return Whether the device is naturally portrait-oriented.
     */
    private static boolean isNaturalPortrait(@NonNull final Context context,
            final int rotationDegrees) {
        final WindowManager windowManager = (WindowManager) context.getSystemService(
                Context.WINDOW_SERVICE);
        if (windowManager == null) {
            // Assume portrait when the device size cannot be queried.
            return true;
        }

        final Display display = windowManager.getDefaultDisplay();
        final Point deviceSize = new Point();
        display.getRealSize(deviceSize);

        final int width = deviceSize.x;
        final int height = deviceSize.y;
        return ((rotationDegrees == 0 || rotationDegrees == 180) && width < height) || (
                (rotationDegrees == 90 || rotationDegrees == 270) && width >= height);
    }

    // Prevent creating an instance
    private ScaleTypeTransform() {
    }
}

View File

@ -0,0 +1,46 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.view.Surface;
final class SurfaceRotation {
/**
* Get the int value degree of a rotation from the {@link Surface} constants.
*
* <p>Valid values for the relative rotation are {@link Surface#ROTATION_0}, {@link
* * Surface#ROTATION_90}, {@link Surface#ROTATION_180}, {@link Surface#ROTATION_270}.
*/
static int rotationDegreesFromSurfaceRotation(int rotationConstant) {
switch (rotationConstant) {
case Surface.ROTATION_0:
return 0;
case Surface.ROTATION_90:
return 90;
case Surface.ROTATION_180:
return 180;
case Surface.ROTATION_270:
return 270;
default:
throw new UnsupportedOperationException(
"Unsupported surface rotation constant: " + rotationConstant);
}
}
/** Prevents construction */
private SurfaceRotation() {}
}

View File

@ -0,0 +1,180 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.UiThread;
import androidx.camera.core.Preview;
import androidx.camera.core.SurfaceRequest;
import androidx.core.content.ContextCompat;
/**
* The SurfaceView implementation for {@link PreviewView}.
*/
@RequiresApi(21)
final class SurfaceViewImplementation implements PreviewView.Implementation {
    private static final String TAG = "SurfaceViewPreviewView";
    // Synthetic Accessor
    @SuppressWarnings("WeakerAccess")
    TransformableSurfaceView mSurfaceView;
    // Synthetic Accessor
    @SuppressWarnings("WeakerAccess")
    final SurfaceRequestCallback mSurfaceRequestCallback =
            new SurfaceRequestCallback();
    // Forwards each SurfaceRequest onto the view's UI thread, where the SurfaceHolder
    // callback resolves it once a Surface of the requested size exists.
    private Preview.SurfaceProvider mSurfaceProvider =
            new Preview.SurfaceProvider() {
                @Override
                public void onSurfaceRequested(@NonNull SurfaceRequest surfaceRequest) {
                    mSurfaceView.post(
                            () -> mSurfaceRequestCallback.setSurfaceRequest(surfaceRequest));
                }
            };
    /**
     * {@inheritDoc}
     */
    @Override
    public void init(@NonNull FrameLayout parent) {
        // Create the SurfaceView with the parent's Context and size it to fill the parent.
        mSurfaceView = new TransformableSurfaceView(parent.getContext());
        mSurfaceView.setLayoutParams(
                new FrameLayout.LayoutParams(
                        FrameLayout.LayoutParams.MATCH_PARENT,
                        FrameLayout.LayoutParams.MATCH_PARENT));
        parent.addView(mSurfaceView);
        mSurfaceView.getHolder().addCallback(mSurfaceRequestCallback);
    }
    /**
     * {@inheritDoc}
     */
    @NonNull
    @Override
    public Preview.SurfaceProvider getSurfaceProvider() {
        return mSurfaceProvider;
    }
    @Override
    public void onDisplayChanged() {
        // No-op for the SurfaceView implementation.
    }
    /**
     * The {@link SurfaceHolder.Callback} on mSurfaceView.
     *
     * <p> SurfaceView creates Surface on its own before we can do anything. This class makes
     * sure only the Surface with correct size will be returned to Preview.
     */
    class SurfaceRequestCallback implements SurfaceHolder.Callback {
        // Target Surface size. Only complete the SurfaceRequest when the size of the Surface
        // matches this value.
        // Guarded by UI thread.
        @Nullable
        private Size mTargetSize;
        // SurfaceRequest to set when the target size is met.
        // Guarded by UI thread.
        @Nullable
        private SurfaceRequest mSurfaceRequest;
        // The cached size of the current Surface.
        // Guarded by UI thread.
        @Nullable
        private Size mCurrentSurfaceSize;
        /**
         * Sets the completer and the size. The completer will only be set if the current size of
         * the Surface matches the target size.
         */
        @UiThread
        void setSurfaceRequest(@NonNull SurfaceRequest surfaceRequest) {
            // Any in-flight request is superseded by this one.
            cancelPreviousRequest();
            mSurfaceRequest = surfaceRequest;
            Size targetSize = surfaceRequest.getResolution();
            mTargetSize = targetSize;
            if (!tryToComplete()) {
                // The current size is incorrect. Wait for it to change.
                Log.d(TAG, "Wait for new Surface creation.");
                // Forcing a fixed size makes the holder deliver surfaceChanged() with the
                // target size, at which point tryToComplete() will succeed.
                mSurfaceView.getHolder().setFixedSize(targetSize.getWidth(),
                        targetSize.getHeight());
            }
        }
        /**
         * Sets the completer if size matches.
         *
         * @return true if the completer is set.
         */
        @UiThread
        private boolean tryToComplete() {
            Surface surface = mSurfaceView.getHolder().getSurface();
            if (mSurfaceRequest != null && mTargetSize != null && mTargetSize.equals(
                    mCurrentSurfaceSize)) {
                Log.d(TAG, "Surface set on Preview.");
                mSurfaceRequest.provideSurface(surface,
                        ContextCompat.getMainExecutor(mSurfaceView.getContext()),
                        (result) -> Log.d(TAG, "Safe to release surface."));
                mSurfaceRequest = null;
                mTargetSize = null;
                return true;
            }
            return false;
        }
        // Tells CameraX that the pending request will never be fulfilled, then clears state.
        @UiThread
        private void cancelPreviousRequest() {
            if (mSurfaceRequest != null) {
                Log.d(TAG, "Request canceled: " + mSurfaceRequest);
                mSurfaceRequest.willNotProvideSurface();
                mSurfaceRequest = null;
            }
            mTargetSize = null;
        }
        @Override
        public void surfaceCreated(SurfaceHolder surfaceHolder) {
            Log.d(TAG, "Surface created.");
            // No-op. Handling surfaceChanged() is enough because it's always called afterwards.
        }
        @Override
        public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
            Log.d(TAG, "Surface changed. Size: " + width + "x" + height);
            mCurrentSurfaceSize = new Size(width, height);
            tryToComplete();
        }
        @Override
        public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
            Log.d(TAG, "Surface destroyed.");
            mCurrentSurfaceSize = null;
            cancelPreviousRequest();
        }
    }
}

View File

@ -0,0 +1,238 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import static androidx.camera.core.SurfaceRequest.Result;
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.camera.core.Preview;
import androidx.camera.core.SurfaceRequest;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.impl.utils.futures.FutureCallback;
import androidx.camera.core.impl.utils.futures.Futures;
import androidx.concurrent.futures.CallbackToFutureAdapter;
import androidx.core.content.ContextCompat;
import androidx.core.util.Preconditions;
import com.google.common.util.concurrent.ListenableFuture;
/**
* The {@link TextureView} implementation for {@link PreviewView}
*/
// Begin Signal Custom Code Block
@RequiresApi(21)
@SuppressLint("RestrictedApi")
// End Signal Custom Code Block
public class TextureViewImplementation implements PreviewView.Implementation {
    private static final String TAG = "TextureViewImpl";
    // Parent layout passed to init(); the TextureView is (re)attached to it per request.
    private FrameLayout mParent;
    TextureView mTextureView;
    // Cached from onSurfaceTextureAvailable(); null once the texture is destroyed.
    SurfaceTexture mSurfaceTexture;
    // Resolution requested by the camera; updated each time a SurfaceRequest arrives.
    private Size mResolution;
    // Completes when the camera is done with the Surface that was provided to it.
    ListenableFuture<Result> mSurfaceReleaseFuture;
    // Pending request; null after the Surface has been provided or the request cancelled.
    SurfaceRequest mSurfaceRequest;
    @Override
    public void init(@NonNull FrameLayout parent) {
        mParent = parent;
    }
    /**
     * Returns a provider that recreates the TextureView for each new request and fulfills
     * the request once the view's {@link SurfaceTexture} becomes available.
     */
    @NonNull
    @Override
    public Preview.SurfaceProvider getSurfaceProvider() {
        return (surfaceRequest) -> {
            mResolution = surfaceRequest.getResolution();
            initInternal();
            if (mSurfaceRequest != null) {
                // A previous request is still pending; tell CameraX it won't be served.
                mSurfaceRequest.willNotProvideSurface();
            }
            mSurfaceRequest = surfaceRequest;
            surfaceRequest.addRequestCancellationListener(
                    ContextCompat.getMainExecutor(mTextureView.getContext()), () -> {
                        // Clear state only if this exact request is still the active one.
                        if (mSurfaceRequest != null && mSurfaceRequest == surfaceRequest) {
                            mSurfaceRequest = null;
                            mSurfaceReleaseFuture = null;
                        }
                    });
            tryToProvidePreviewSurface();
        };
    }
    @Override
    public void onDisplayChanged() {
        if (mParent == null || mTextureView == null || mResolution == null) {
            return;
        }
        correctPreviewForCenterCrop(mParent, mTextureView, mResolution);
    }
    // Creates a fresh TextureView sized to the camera resolution and attaches it to the
    // parent, replacing any previously attached child views.
    private void initInternal() {
        mTextureView = new TextureView(mParent.getContext());
        mTextureView.setLayoutParams(
                new FrameLayout.LayoutParams(mResolution.getWidth(), mResolution.getHeight()));
        mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(final SurfaceTexture surfaceTexture,
                    final int width, final int height) {
                mSurfaceTexture = surfaceTexture;
                tryToProvidePreviewSurface();
            }
            @Override
            public void onSurfaceTextureSizeChanged(final SurfaceTexture surfaceTexture,
                    final int width, final int height) {
                Log.d(TAG, "onSurfaceTextureSizeChanged(width:" + width + ", height: " + height
                        + " )");
            }
            /**
             * If a surface has been provided to the camera (meaning
             * {@link TextureViewImplementation#mSurfaceRequest} is null), but the camera
             * is still using it (meaning {@link TextureViewImplementation#mSurfaceReleaseFuture} is
             * not null), a listener must be added to
             * {@link TextureViewImplementation#mSurfaceReleaseFuture} to ensure the surface
             * is properly released after the camera is done using it.
             *
             * @param surfaceTexture The {@link SurfaceTexture} about to be destroyed.
             * @return false if the camera is not done with the surface, true otherwise.
             */
            @Override
            public boolean onSurfaceTextureDestroyed(final SurfaceTexture surfaceTexture) {
                mSurfaceTexture = null;
                if (mSurfaceRequest == null && mSurfaceReleaseFuture != null) {
                    // The camera still owns the Surface: keep the SurfaceTexture alive
                    // (return false) and release it only once the release future fires.
                    Futures.addCallback(mSurfaceReleaseFuture,
                            new FutureCallback<Result>() {
                                @Override
                                public void onSuccess(Result result) {
                                    Preconditions.checkState(result.getResultCode()
                                                    != Result.RESULT_SURFACE_ALREADY_PROVIDED,
                                            "Unexpected result from SurfaceRequest. Surface was "
                                                    + "provided twice.");
                                    surfaceTexture.release();
                                }
                                @Override
                                public void onFailure(Throwable t) {
                                    throw new IllegalStateException("SurfaceReleaseFuture did not "
                                            + "complete nicely.", t);
                                }
                            }, ContextCompat.getMainExecutor(mTextureView.getContext()));
                    return false;
                } else {
                    return true;
                }
            }
            @Override
            public void onSurfaceTextureUpdated(final SurfaceTexture surfaceTexture) {
            }
        });
        // Even though PreviewView calls `removeAllViews()` before calling init(), it should be
        // called again here in case `getPreviewSurfaceProvider()` is called more than once on
        // the same TextureViewImplementation instance.
        mParent.removeAllViews();
        mParent.addView(mTextureView);
    }
    @SuppressWarnings("WeakerAccess")
    void tryToProvidePreviewSurface() {
        /*
        Should only continue if:
        - The preview size has been specified.
        - The textureView's surfaceTexture is available (after TextureView
        .SurfaceTextureListener#onSurfaceTextureAvailable is invoked)
        - The surfaceCompleter has been set (after CallbackToFutureAdapter
        .Resolver#attachCompleter is invoked).
        */
        if (mResolution == null || mSurfaceTexture == null || mSurfaceRequest == null) {
            return;
        }
        mSurfaceTexture.setDefaultBufferSize(mResolution.getWidth(), mResolution.getHeight());
        final Surface surface = new Surface(mSurfaceTexture);
        final ListenableFuture<Result> surfaceReleaseFuture =
                CallbackToFutureAdapter.getFuture(completer -> {
                    mSurfaceRequest.provideSurface(surface,
                            CameraXExecutors.directExecutor(), completer::set);
                    return "provideSurface[request=" + mSurfaceRequest + " surface=" + surface
                            + "]";
                });
        mSurfaceReleaseFuture = surfaceReleaseFuture;
        mSurfaceReleaseFuture.addListener(() -> {
            // The camera is done with the Surface: release it, and clear the field only if
            // it still refers to this future (a newer request may have replaced it).
            surface.release();
            if (mSurfaceReleaseFuture == surfaceReleaseFuture) {
                mSurfaceReleaseFuture = null;
            }
        }, ContextCompat.getMainExecutor(mTextureView.getContext()));
        mSurfaceRequest = null;
        correctPreviewForCenterCrop(mParent, mTextureView, mResolution);
    }
    /**
     * Corrects the preview to match the UI orientation and completely fill the PreviewView.
     *
     * <p>
     * The camera produces a preview that depends on its sensor orientation and that has a
     * specific resolution. In order to display it correctly, this preview must be rotated to
     * match the UI orientation, and must be scaled up/down to fit inside the view that's
     * displaying it. This method takes care of doing so while keeping the preview centered.
     * </p>
     *
     * @param container   The {@link PreviewView}'s root layout, which wraps the preview.
     * @param textureView The {@link android.view.TextureView} that displays the preview, its size
     *                    must match the camera sensor output size.
     * @param bufferSize  The camera sensor output size.
     */
    private void correctPreviewForCenterCrop(@NonNull final View container,
            @NonNull final TextureView textureView, @NonNull final Size bufferSize) {
        // Scale TextureView to fill PreviewView while respecting sensor output size aspect ratio
        final Pair<Float, Float> scale = ScaleTypeTransform.getFillScaleWithBufferAspectRatio(container, textureView,
                bufferSize);
        textureView.setScaleX(scale.first);
        textureView.setScaleY(scale.second);
        // Center TextureView inside PreviewView
        final Point newOrigin = ScaleTypeTransform.getOriginOfCenteredView(container, textureView);
        textureView.setX(newOrigin.x);
        textureView.setY(newOrigin.y);
        // Rotate TextureView to correct preview orientation
        final int rotation = ScaleTypeTransform.getRotationDegrees(textureView);
        textureView.setRotation(-rotation);
    }
}

View File

@ -22,6 +22,7 @@ import android.graphics.SurfaceTexture;
import android.view.TextureView;
import androidx.annotation.NonNull;
import androidx.annotation.RestrictTo;
import androidx.camera.core.MeteringPoint;
import androidx.camera.core.MeteringPointFactory;
@ -37,57 +38,60 @@ import androidx.camera.core.MeteringPointFactory;
 * to the lens face of current camera output.
*/
public class TextureViewMeteringPointFactory extends MeteringPointFactory {
private final TextureView mTextureView;
private final TextureView mTextureView;
public TextureViewMeteringPointFactory(@NonNull TextureView textureView) {
mTextureView = textureView;
}
public TextureViewMeteringPointFactory(@NonNull TextureView textureView) {
mTextureView = textureView;
}
/**
* Translates a (x,y) from TextureView.
*/
@NonNull
@Override
protected PointF translatePoint(float x, float y) {
Matrix transform = new Matrix();
mTextureView.getTransform(transform);
/**
* Translates a (x,y) from TextureView.
*
* @hide
*/
@NonNull
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@Override
protected PointF convertPoint(float x, float y) {
Matrix transform = new Matrix();
mTextureView.getTransform(transform);
// applying reverse of TextureView#getTransform
Matrix inverse = new Matrix();
transform.invert(inverse);
float[] pt = new float[]{x, y};
inverse.mapPoints(pt);
// applying reverse of TextureView#getTransform
Matrix inverse = new Matrix();
transform.invert(inverse);
float[] pt = new float[]{x, y};
inverse.mapPoints(pt);
// get SurfaceTexture#getTransformMatrix
float[] surfaceTextureMat = new float[16];
mTextureView.getSurfaceTexture().getTransformMatrix(surfaceTextureMat);
// get SurfaceTexture#getTransformMatrix
float[] surfaceTextureMat = new float[16];
mTextureView.getSurfaceTexture().getTransformMatrix(surfaceTextureMat);
// convert SurfaceTexture#getTransformMatrix(4x4 column major 3D matrix) to
// android.graphics.Matrix(3x3 row major 2D matrix)
Matrix surfaceTextureTransform = glMatrixToGraphicsMatrix(surfaceTextureMat);
// convert SurfaceTexture#getTransformMatrix(4x4 column major 3D matrix) to
// android.graphics.Matrix(3x3 row major 2D matrix)
Matrix surfaceTextureTransform = glMatrixToGraphicsMatrix(surfaceTextureMat);
float[] pt2 = new float[2];
// convert to texture coordinates first.
pt2[0] = pt[0] / mTextureView.getWidth();
pt2[1] = (mTextureView.getHeight() - pt[1]) / mTextureView.getHeight();
surfaceTextureTransform.mapPoints(pt2);
float[] pt2 = new float[2];
// convert to texture coordinates first.
pt2[0] = pt[0] / mTextureView.getWidth();
pt2[1] = (mTextureView.getHeight() - pt[1]) / mTextureView.getHeight();
surfaceTextureTransform.mapPoints(pt2);
return new PointF(pt2[0], pt2[1]);
}
return new PointF(pt2[0], pt2[1]);
}
private Matrix glMatrixToGraphicsMatrix(float[] glMatrix) {
float[] convert = new float[9];
convert[0] = glMatrix[0];
convert[1] = glMatrix[4];
convert[2] = glMatrix[12];
convert[3] = glMatrix[1];
convert[4] = glMatrix[5];
convert[5] = glMatrix[13];
convert[6] = glMatrix[3];
convert[7] = glMatrix[7];
convert[8] = glMatrix[15];
Matrix graphicsMatrix = new Matrix();
graphicsMatrix.setValues(convert);
return graphicsMatrix;
}
private Matrix glMatrixToGraphicsMatrix(float[] glMatrix) {
float[] convert = new float[9];
convert[0] = glMatrix[0];
convert[1] = glMatrix[4];
convert[2] = glMatrix[12];
convert[3] = glMatrix[1];
convert[4] = glMatrix[5];
convert[5] = glMatrix[13];
convert[6] = glMatrix[3];
convert[7] = glMatrix[7];
convert[8] = glMatrix[15];
Matrix graphicsMatrix = new Matrix();
graphicsMatrix.setValues(convert);
return graphicsMatrix;
}
}

View File

@ -0,0 +1,130 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.view.SurfaceView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
/**
* A subclass of {@link SurfaceView} that supports translation and scaling transformations.
*/
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
final class TransformableSurfaceView extends SurfaceView {
    // When non-null, overrides the size reported by onMeasure() (set via setTransform()).
    private RectF mOverriddenLayoutRect;
    TransformableSurfaceView(@NonNull Context context) {
        super(context);
    }
    TransformableSurfaceView(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
    }
    TransformableSurfaceView(@NonNull Context context, @Nullable AttributeSet attrs,
            int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
    TransformableSurfaceView(@NonNull Context context, @Nullable AttributeSet attrs,
            int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
    }
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (mOverriddenLayoutRect == null) {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        } else {
            // A transform has been applied: report the transformed size instead.
            setMeasuredDimension((int) mOverriddenLayoutRect.width(),
                    (int) mOverriddenLayoutRect.height());
        }
    }
    /**
     * Sets the transform to associate with this surface view. Only translation and scaling are
     * supported. If a rotated transformation is passed in, an exception is thrown.
     *
     * @param transform The transform to apply to the content of this view.
     */
    void setTransform(final Matrix transform) {
        if (hasRotation(transform)) {
            throw new IllegalArgumentException("TransformableSurfaceView does not support "
                    + "rotation transformations.");
        }
        // Map the view's current layout bounds through the transform and adopt the result.
        final RectF rect = new RectF(getLeft(), getTop(), getRight(), getBottom());
        transform.mapRect(rect);
        overrideLayout(rect);
    }
    // Returns true if the matrix contains a rotation component (rounded to whole degrees).
    private boolean hasRotation(final Matrix matrix) {
        final float[] values = new float[9];
        matrix.getValues(values);
        /*
        A translation matrix can be represented as:
        (1 0 transX)
        (0 1 transY)
        (0 0 1)
        A rotation Matrix of ψ degrees can be represented as:
        (cosψ -sinψ 0)
        (sinψ cosψ 0)
        (0 0 1)
        A scale matrix can be represented as:
        (scaleX 0 0)
        (0 scaleY 0)
        (0 0 1)
        Meaning a transformed matrix can be represented as:
        (scaleX * cosψ -scaleX * sinψ transX)
        (scaleY * sinψ scaleY * cosψ transY)
        (0 0 1)
        Using the following 2 equalities:
        scaleX * cosψ = matrix[0][0]
        -scaleX * sinψ = matrix[0][1]
        The following is deduced:
        -tanψ = matrix[0][1] / matrix[0][0]
        Or:
        ψ = -arctan(matrix[0][1] / matrix[0][0])
        */
        final double angle = -Math.atan2(values[Matrix.MSKEW_X], values[Matrix.MSCALE_X]);
        return Math.round(angle * (180 / Math.PI)) != 0;
    }
    // Pins the view's origin to the rect and triggers a re-measure using the rect's size.
    private void overrideLayout(final RectF overriddenLayoutRect) {
        mOverriddenLayoutRect = overriddenLayoutRect;
        setX(overriddenLayoutRect.left);
        setY(overriddenLayoutRect.top);
        requestLayout();
    }
}

View File

@ -16,7 +16,22 @@
package org.thoughtcrime.securesms.mediasend.camerax;
import android.annotation.SuppressLint;
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.location.Location;
import android.media.AudioFormat;
import android.media.AudioRecord;
@ -30,39 +45,42 @@ import android.media.MediaMuxer;
import android.media.MediaRecorder.AudioSource;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import androidx.annotation.GuardedBy;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RestrictTo;
import androidx.annotation.RestrictTo.Scope;
import androidx.camera.core.CameraInfoInternal;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraInfo;
import androidx.camera.core.CameraX;
import androidx.camera.core.CameraX.LensFacing;
import androidx.camera.core.CameraXThreads;
import androidx.camera.core.ConfigProvider;
import androidx.camera.core.DeferrableSurface;
import androidx.camera.core.ImageOutputConfig;
import androidx.camera.core.ImageOutputConfig.RotationValue;
import androidx.camera.core.ImmediateSurface;
import androidx.camera.core.SessionConfig;
import androidx.camera.core.UseCase;
import androidx.camera.core.UseCaseConfig;
import androidx.camera.core.VideoCaptureConfig;
import androidx.camera.core.impl.CameraInfoInternal;
import androidx.camera.core.impl.CameraInternal;
import androidx.camera.core.impl.ConfigProvider;
import androidx.camera.core.impl.DeferrableSurface;
import androidx.camera.core.impl.ImageOutputConfig;
import androidx.camera.core.impl.ImageOutputConfig.RotationValue;
import androidx.camera.core.impl.ImmediateSurface;
import androidx.camera.core.impl.SessionConfig;
import androidx.camera.core.impl.UseCaseConfig;
import androidx.camera.core.impl.VideoCaptureConfig;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.internal.utils.UseCaseConfigUtil;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.concurrent.Executor;
@ -77,10 +95,31 @@ import java.util.concurrent.atomic.AtomicBoolean;
*
* @hide In the earlier stage, the VideoCapture is deprioritized.
*/
// Begin Signal Custom Code Block
@RequiresApi(26)
// End Signal Custom Code Block
@RestrictTo(Scope.LIBRARY_GROUP)
@SuppressWarnings("ClassCanBeStatic") // TODO(b/141958189): Suppressed during upgrade to AGP 3.6.
public class VideoCapture extends UseCase {
/**
* An unknown error occurred.
*
* <p>See message parameter in onError callback or log for more details.
*/
public static final int ERROR_UNKNOWN = 0;
/**
* An error occurred with encoder state, either when trying to change state or when an
* unexpected state change occurred.
*/
public static final int ERROR_ENCODER = 1;
/** An error with muxer state such as during creation or when stopping. */
public static final int ERROR_MUXER = 2;
/**
* An error indicating start recording was called when video recording is still in progress.
*/
public static final int ERROR_RECORDING_IN_PROGRESS = 3;
/**
* Provides a static configuration with implementation-agnostic options.
*
@ -131,7 +170,6 @@ public class VideoCapture extends UseCase {
/** For record the first sample written time. */
private final AtomicBoolean mIsFirstVideoSampleWrite = new AtomicBoolean(false);
private final AtomicBoolean mIsFirstAudioSampleWrite = new AtomicBoolean(false);
private final VideoCaptureConfig.Builder mUseCaseConfigBuilder;
@NonNull
MediaCodec mVideoEncoder;
@ -147,7 +185,9 @@ public class VideoCapture extends UseCase {
private int mAudioTrackIndex;
/** Surface the camera writes to, which the videoEncoder uses as input. */
Surface mCameraSurface;
/** audio raw data */
@NonNull
private AudioRecord mAudioRecorder;
private int mAudioBufferSize;
private boolean mIsRecording = false;
@ -163,7 +203,6 @@ public class VideoCapture extends UseCase {
*/
public VideoCapture(VideoCaptureConfig config) {
super(config);
mUseCaseConfigBuilder = VideoCaptureConfig.Builder.fromConfig(config);
// video thread start
mVideoHandlerThread.start();
@ -182,9 +221,6 @@ public class VideoCapture extends UseCase {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, config.getBitRate());
format.setInteger(MediaFormat.KEY_FRAME_RATE, config.getVideoFrameRate());
// Begin Signal Custom Code Block
format.setInteger(MediaFormat.KEY_CAPTURE_RATE, config.getVideoFrameRate());
// End Signal Custom Code Block
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, config.getIFrameInterval());
return format;
@ -199,9 +235,9 @@ public class VideoCapture extends UseCase {
@Override
@Nullable
@RestrictTo(Scope.LIBRARY_GROUP)
protected UseCaseConfig.Builder<?, ?, ?> getDefaultBuilder(LensFacing lensFacing) {
VideoCaptureConfig defaults = CameraX.getDefaultUseCaseConfig(
VideoCaptureConfig.class, lensFacing);
protected UseCaseConfig.Builder<?, ?, ?> getDefaultBuilder(@Nullable CameraInfo cameraInfo) {
VideoCaptureConfig defaults = CameraX.getDefaultUseCaseConfig(VideoCaptureConfig.class,
cameraInfo);
if (defaults != null) {
return VideoCaptureConfig.Builder.fromConfig(defaults);
}
@ -216,9 +252,9 @@ public class VideoCapture extends UseCase {
*/
@Override
@RestrictTo(Scope.LIBRARY_GROUP)
@NonNull
protected Map<String, Size> onSuggestedResolutionUpdated(
Map<String, Size> suggestedResolutionMap) {
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
@NonNull Map<String, Size> suggestedResolutionMap) {
if (mCameraSurface != null) {
mVideoEncoder.stop();
mVideoEncoder.release();
@ -234,14 +270,14 @@ public class VideoCapture extends UseCase {
throw new IllegalStateException("Unable to create MediaCodec due to: " + e.getCause());
}
String cameraId = getCameraIdUnchecked(config);
String cameraId = getBoundCameraId();
Size resolution = suggestedResolutionMap.get(cameraId);
if (resolution == null) {
throw new IllegalArgumentException(
"Suggested resolution map missing resolution for camera " + cameraId);
}
setupEncoder(resolution);
setupEncoder(cameraId, resolution);
return suggestedResolutionMap;
}
@ -250,20 +286,19 @@ public class VideoCapture extends UseCase {
* called.
*
* <p>StartRecording() is asynchronous. User needs to check if any error occurs by setting the
* {@link OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)}.
* {@link OnVideoSavedCallback#onError(int, String, Throwable)}.
*
* @param saveLocation Location to save the video capture
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener to call for the recorded video
* @param executor The executor in which the callback methods will be run.
* @param callback Callback for when the recorded video saving completion or failure.
*/
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
// Begin Signal Custom Code Block
public void startRecording(@NonNull FileDescriptor saveLocation,
@NonNull Executor executor, @NonNull OnVideoSavedListener listener) {
// End Signal Custom Code Block
@NonNull Executor executor, @NonNull OnVideoSavedCallback callback) {
mIsFirstVideoSampleWrite.set(false);
mIsFirstAudioSampleWrite.set(false);
startRecording(saveLocation, EMPTY_METADATA, executor, listener);
startRecording(saveLocation, EMPTY_METADATA, executor, callback);
}
/**
@ -271,26 +306,26 @@ public class VideoCapture extends UseCase {
* called.
*
* <p>StartRecording() is asynchronous. User needs to check if any error occurs by setting the
* {@link OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)}.
* {@link OnVideoSavedCallback#onError(int, String, Throwable)}.
*
* @param saveLocation Location to save the video capture
* @param metadata Metadata to save with the recorded video
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener to call for the recorded video
* @param executor The executor in which the callback methods will be run.
* @param callback Callback for when the recorded video saving completion or failure.
*/
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
// Begin Signal Custom Code Block
public void startRecording(
@NonNull FileDescriptor saveLocation, @NonNull Metadata metadata,
// Begin Signal Custom Code Block
@NonNull FileDescriptor saveLocation,
// End Signal Custom Code Block
@NonNull Metadata metadata,
@NonNull Executor executor,
@NonNull OnVideoSavedListener listener) {
// End Signal Custom Code Block
@NonNull OnVideoSavedCallback callback) {
Log.i(TAG, "startRecording");
OnVideoSavedListener postListener = new VideoSavedListenerWrapper(executor, listener);
OnVideoSavedCallback postListener = new VideoSavedListenerWrapper(executor, callback);
if (!mEndOfAudioVideoSignal.get()) {
postListener.onError(
VideoCaptureError.RECORDING_IN_PROGRESS, "It is still in video recording!",
ERROR_RECORDING_IN_PROGRESS, "It is still in video recording!",
null);
return;
}
@ -305,12 +340,13 @@ public class VideoCapture extends UseCase {
}
// End Signal Custom Code Block
} catch (IllegalStateException e) {
postListener.onError(VideoCaptureError.ENCODER_ERROR, "AudioRecorder start fail", e);
postListener.onError(ERROR_ENCODER, "AudioRecorder start fail", e);
return;
}
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
String cameraId = getCameraIdUnchecked(config);
CameraInternal boundCamera = getBoundCamera();
String cameraId = getBoundCameraId();
Size resolution = getAttachedSurfaceResolution(cameraId);
try {
// video encoder start
Log.i(TAG, "videoEncoder start");
@ -320,23 +356,15 @@ public class VideoCapture extends UseCase {
mAudioEncoder.start();
} catch (IllegalStateException e) {
setupEncoder(getAttachedSurfaceResolution(cameraId));
postListener.onError(VideoCaptureError.ENCODER_ERROR, "Audio/Video encoder start fail",
setupEncoder(cameraId, resolution);
postListener.onError(ERROR_ENCODER, "Audio/Video encoder start fail",
e);
return;
}
// Get the relative rotation or default to 0 if the camera info is unavailable
int relativeRotation = 0;
try {
CameraInfoInternal cameraInfoInternal = CameraX.getCameraInfo(cameraId);
relativeRotation =
cameraInfoInternal.getSensorRotationDegrees(
((ImageOutputConfig) getUseCaseConfig())
.getTargetRotation(Surface.ROTATION_0));
} catch (CameraInfoUnavailableException e) {
Log.e(TAG, "Unable to retrieve camera sensor orientation.", e);
}
CameraInfoInternal cameraInfoInternal = boundCamera.getCameraInfoInternal();
int relativeRotation = cameraInfoInternal.getSensorRotationDegrees(
((ImageOutputConfig) getUseCaseConfig()).getTargetRotation(Surface.ROTATION_0));
try {
synchronized (mMuxerLock) {
@ -355,8 +383,8 @@ public class VideoCapture extends UseCase {
}
}
} catch (IOException e) {
setupEncoder(getAttachedSurfaceResolution(cameraId));
postListener.onError(VideoCaptureError.MUXER_ERROR, "MediaMuxer creation failed!", e);
setupEncoder(cameraId, resolution);
postListener.onError(ERROR_MUXER, "MediaMuxer creation failed!", e);
return;
}
@ -378,7 +406,8 @@ public class VideoCapture extends UseCase {
new Runnable() {
@Override
public void run() {
boolean errorOccurred = VideoCapture.this.videoEncode(postListener);
boolean errorOccurred = VideoCapture.this.videoEncode(postListener,
cameraId, resolution);
if (!errorOccurred) {
postListener.onVideoSaved(saveLocation);
}
@ -388,11 +417,11 @@ public class VideoCapture extends UseCase {
/**
* Stops recording video, this must be called after {@link
* VideoCapture#startRecording(File, Metadata, Executor, OnVideoSavedListener)} is called.
* VideoCapture#startRecording(File, Metadata, Executor, OnVideoSavedCallback)} is called.
*
* <p>stopRecording() is asynchronous API. User need to check if {@link
* OnVideoSavedListener#onVideoSaved(File)} or
* {@link OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)} be called
* OnVideoSavedCallback#onVideoSaved(File)} or
* {@link OnVideoSavedCallback#onError(int, String, Throwable)} be called
* before startRecording.
*/
public void stopRecording() {
@ -438,23 +467,17 @@ public class VideoCapture extends UseCase {
return;
}
final Surface surface = mCameraSurface;
final MediaCodec videoEncoder = mVideoEncoder;
mDeferrableSurface.setOnSurfaceDetachedListener(
CameraXExecutors.mainThreadExecutor(),
new DeferrableSurface.OnSurfaceDetachedListener() {
@Override
public void onSurfaceDetached() {
if (releaseVideoEncoder && videoEncoder != null) {
videoEncoder.release();
}
if (surface != null) {
surface.release();
}
// Calling close should allow termination future to complete and close the surface with
// the listener that was added after constructing the DeferrableSurface.
mDeferrableSurface.close();
mDeferrableSurface.getTerminationFuture().addListener(
() -> {
if (releaseVideoEncoder && videoEncoder != null) {
videoEncoder.release();
}
});
}, CameraXExecutors.mainThreadExecutor());
if (releaseVideoEncoder) {
mVideoEncoder = null;
@ -473,11 +496,12 @@ public class VideoCapture extends UseCase {
* @param rotation Desired rotation of the output video.
*/
public void setTargetRotation(@RotationValue int rotation) {
ImageOutputConfig oldConfig = (ImageOutputConfig) getUseCaseConfig();
VideoCaptureConfig oldConfig = (VideoCaptureConfig) getUseCaseConfig();
VideoCaptureConfig.Builder builder = VideoCaptureConfig.Builder.fromConfig(oldConfig);
int oldRotation = oldConfig.getTargetRotation(ImageOutputConfig.INVALID_ROTATION);
if (oldRotation == ImageOutputConfig.INVALID_ROTATION || oldRotation != rotation) {
mUseCaseConfigBuilder.setTargetRotation(rotation);
updateUseCaseConfig(mUseCaseConfigBuilder.build());
UseCaseConfigUtil.updateTargetRotationAndRelatedConfigs(builder, rotation);
updateUseCaseConfig(builder.getUseCaseConfig());
// TODO(b/122846516): Update session configuration and possibly reconfigure session.
}
@ -488,7 +512,7 @@ public class VideoCapture extends UseCase {
* audio from selected audio source.
*/
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
void setupEncoder(Size resolution) {
void setupEncoder(@NonNull String cameraId, @NonNull Size resolution) {
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
// video encoder setup
@ -501,21 +525,32 @@ public class VideoCapture extends UseCase {
if (mCameraSurface != null) {
releaseCameraSurface(false);
}
mCameraSurface = mVideoEncoder.createInputSurface();
Surface cameraSurface = mVideoEncoder.createInputSurface();
mCameraSurface = cameraSurface;
SessionConfig.Builder sessionConfigBuilder = SessionConfig.Builder.createFrom(config);
if (mDeferrableSurface != null) {
mDeferrableSurface.close();
}
mDeferrableSurface = new ImmediateSurface(mCameraSurface);
mDeferrableSurface.getTerminationFuture().addListener(
cameraSurface::release, CameraXExecutors.mainThreadExecutor()
);
sessionConfigBuilder.addSurface(mDeferrableSurface);
String cameraId = getCameraIdUnchecked(config);
sessionConfigBuilder.addErrorListener(new SessionConfig.ErrorListener() {
@Override
public void onError(@NonNull SessionConfig sessionConfig,
@NonNull SessionConfig.SessionError error) {
setupEncoder(resolution);
// Ensure the bound camera has not changed before calling setupEncoder.
// TODO(b/143915543): Ensure this never gets called by a camera that is not bound
// to this use case so we don't need to do this check.
if (isCurrentlyBoundCamera(cameraId)) {
// Only reset the pipeline when the bound camera is the same.
setupEncoder(cameraId, resolution);
}
}
});
@ -620,8 +655,8 @@ public class VideoCapture extends UseCase {
*
* @return returns {@code true} if an error condition occurred, otherwise returns {@code false}
*/
boolean videoEncode(OnVideoSavedListener videoSavedListener) {
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
boolean videoEncode(@NonNull OnVideoSavedCallback videoSavedCallback, @NonNull String cameraId,
@NonNull Size resolution) {
// Main encoding loop. Exits on end of stream.
boolean errorOccurred = false;
boolean videoEos = false;
@ -638,8 +673,8 @@ public class VideoCapture extends UseCase {
switch (outputBufferId) {
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
if (mMuxerStarted) {
videoSavedListener.onError(
VideoCaptureError.ENCODER_ERROR,
videoSavedCallback.onError(
ERROR_ENCODER,
"Unexpected change in video encoding format.",
null);
errorOccurred = true;
@ -656,10 +691,6 @@ public class VideoCapture extends UseCase {
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
// Timed out. Just wait until next attempt to deque.
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
// Ignore output buffers changed since we dequeue a single buffer instead of
// multiple
break;
default:
videoEos = writeVideoEncodedBuffer(outputBufferId);
}
@ -669,7 +700,7 @@ public class VideoCapture extends UseCase {
Log.i(TAG, "videoEncoder stop");
mVideoEncoder.stop();
} catch (IllegalStateException e) {
videoSavedListener.onError(VideoCaptureError.ENCODER_ERROR,
videoSavedCallback.onError(ERROR_ENCODER,
"Video encoder stop failed!", e);
errorOccurred = true;
}
@ -686,16 +717,15 @@ public class VideoCapture extends UseCase {
}
}
} catch (IllegalStateException e) {
videoSavedListener.onError(VideoCaptureError.MUXER_ERROR, "Muxer stop failed!", e);
videoSavedCallback.onError(ERROR_MUXER, "Muxer stop failed!", e);
errorOccurred = true;
}
mMuxerStarted = false;
// Do the setup of the videoEncoder at the end of video recording instead of at the start of
// recording because it requires attaching a new Surface. This causes a glitch so we don't
// want
// that to incur latency at the start of capture.
setupEncoder(getAttachedSurfaceResolution(getCameraIdUnchecked(config)));
// want that to incur latency at the start of capture.
setupEncoder(cameraId, resolution);
notifyReset();
// notify the UI thread that the video recording has finished
@ -705,7 +735,7 @@ public class VideoCapture extends UseCase {
return errorOccurred;
}
boolean audioEncode(OnVideoSavedListener videoSavedListener) {
boolean audioEncode(OnVideoSavedCallback videoSavedCallback) {
// Audio encoding loop. Exits on end of stream.
boolean audioEos = false;
int outIndex;
@ -766,14 +796,14 @@ public class VideoCapture extends UseCase {
}
// End Signal Custom Code Block
} catch (IllegalStateException e) {
videoSavedListener.onError(
VideoCaptureError.ENCODER_ERROR, "Audio recorder stop failed!", e);
videoSavedCallback.onError(
ERROR_ENCODER, "Audio recorder stop failed!", e);
}
try {
mAudioEncoder.stop();
} catch (IllegalStateException e) {
videoSavedListener.onError(VideoCaptureError.ENCODER_ERROR,
videoSavedCallback.onError(ERROR_ENCODER,
"Audio encoder stop failed!", e);
}
@ -889,39 +919,29 @@ public class VideoCapture extends UseCase {
* Describes the error that occurred during video capture operations.
*
* <p>This is a parameter sent to the error callback functions set in listeners such as {@link
* VideoCapture.OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)}.
* VideoCapture.OnVideoSavedCallback#onError(int, String, Throwable)}.
*
* <p>See message parameter in onError callback or log for more details.
*
* @hide
*/
public enum VideoCaptureError {
/**
* An unknown error occurred.
*
* <p>See message parameter in onError callback or log for more details.
*/
UNKNOWN_ERROR,
/**
* An error occurred with encoder state, either when trying to change state or when an
* unexpected state change occurred.
*/
ENCODER_ERROR,
/** An error with muxer state such as during creation or when stopping. */
MUXER_ERROR,
/**
* An error indicating start recording was called when video recording is still in progress.
*/
RECORDING_IN_PROGRESS
@IntDef({ERROR_UNKNOWN, ERROR_ENCODER, ERROR_MUXER, ERROR_RECORDING_IN_PROGRESS})
@Retention(RetentionPolicy.SOURCE)
@RestrictTo(Scope.LIBRARY_GROUP)
public @interface VideoCaptureError {
}
/** Listener containing callbacks for video file I/O events. */
public interface OnVideoSavedListener {
public interface OnVideoSavedCallback {
/** Called when the video has been successfully saved. */
// TODO: Should remove file argument to match ImageCapture.OnImageSavedCallback
// #onImageSaved()
// Begin Signal Custom Code Block
void onVideoSaved(@NonNull FileDescriptor file);
// End Signal Custom Code Block
/** Called when an error occurs while attempting to save the video. */
void onError(@NonNull VideoCaptureError videoCaptureError, @NonNull String message,
void onError(@VideoCaptureError int videoCaptureError, @NonNull String message,
@Nullable Throwable cause);
}
@ -936,7 +956,6 @@ public class VideoCapture extends UseCase {
@RestrictTo(Scope.LIBRARY_GROUP)
public static final class Defaults
implements ConfigProvider<VideoCaptureConfig> {
private static final Handler DEFAULT_HANDLER = new Handler(Looper.getMainLooper());
private static final int DEFAULT_VIDEO_FRAME_RATE = 30;
/** 8Mb/s the recommend rate for 30fps 1080p */
private static final int DEFAULT_BIT_RATE = 8 * 1024 * 1024;
@ -973,11 +992,12 @@ public class VideoCapture extends UseCase {
.setMaxResolution(DEFAULT_MAX_RESOLUTION)
.setSurfaceOccupancyPriority(DEFAULT_SURFACE_OCCUPANCY_PRIORITY);
DEFAULT_CONFIG = builder.build();
DEFAULT_CONFIG = builder.getUseCaseConfig();
}
@NonNull
@Override
public VideoCaptureConfig getConfig(LensFacing lensFacing) {
public VideoCaptureConfig getConfig(@Nullable CameraInfo cameraInfo) {
return DEFAULT_CONFIG;
}
}
@ -989,15 +1009,17 @@ public class VideoCapture extends UseCase {
public Location location;
}
private final class VideoSavedListenerWrapper implements OnVideoSavedListener {
private final class VideoSavedListenerWrapper implements OnVideoSavedCallback {
@NonNull Executor mExecutor;
@NonNull OnVideoSavedListener mOnVideoSavedListener;
@NonNull
Executor mExecutor;
@NonNull
OnVideoSavedCallback mOnVideoSavedCallback;
VideoSavedListenerWrapper(@NonNull Executor executor,
@NonNull OnVideoSavedListener onVideoSavedListener) {
@NonNull OnVideoSavedCallback onVideoSavedCallback) {
mExecutor = executor;
mOnVideoSavedListener = onVideoSavedListener;
mOnVideoSavedCallback = onVideoSavedCallback;
}
@Override
@ -1005,18 +1027,18 @@ public class VideoCapture extends UseCase {
public void onVideoSaved(@NonNull FileDescriptor file) {
// End Signal Custom Code Block
try {
mExecutor.execute(() -> mOnVideoSavedListener.onVideoSaved(file));
mExecutor.execute(() -> mOnVideoSavedCallback.onVideoSaved(file));
} catch (RejectedExecutionException e) {
Log.e(TAG, "Unable to post to the supplied executor.");
}
}
@Override
public void onError(@NonNull VideoCaptureError videoCaptureError, @NonNull String message,
public void onError(@VideoCaptureError int videoCaptureError, @NonNull String message,
@Nullable Throwable cause) {
try {
mExecutor.execute(
() -> mOnVideoSavedListener.onError(videoCaptureError, message, cause));
() -> mOnVideoSavedCallback.onError(videoCaptureError, message, cause));
} catch (RejectedExecutionException e) {
Log.e(TAG, "Unable to post to the supplied executor.");
}

View File

@ -401,4 +401,11 @@
<declare-styleable name="MaxHeightFrameLayout">
<attr name="mhfl_maxHeight" format="dimension" />
</declare-styleable>
<declare-styleable name="PreviewView">
<attr format="enum" name="implementationMode">
<enum name="surfaceView" value="0"/>
<enum name="textureView" value="1"/>
</attr>
</declare-styleable>
</resources>

View File

@ -6,6 +6,9 @@ dependencyVerification {
['androidx.activity:activity:1.0.0',
'd1bc9842455c2e534415d88c44df4d52413b478db9093a1ba36324f705f44c3d'],
['androidx.annotation:annotation-experimental:1.0.0-rc01',
'2f113195f61ecd08ea46cef545f0ea338898391d95425cf4c8836ba2b701f5d6'],
['androidx.annotation:annotation:1.1.0',
'd38d63edb30f1467818d50aaf05f8a692dea8b31392a049bfa991b159ad5b692'],
@ -24,11 +27,14 @@ dependencyVerification {
['androidx.asynclayoutinflater:asynclayoutinflater:1.0.0',
'f7eab60c57addd94bb06275832fe7600611beaaae1a1ec597c231956faf96c8b'],
['androidx.camera:camera-camera2:1.0.0-alpha06',
'e50f20deb950ffebcd4d1de5408ef7a5404bec80ec77119e05663c890739b903'],
['androidx.camera:camera-camera2:1.0.0-beta01',
'02e15ad76153d09adcd6631627960707a8786333a8276d05dcbefc2bfe4ef5a1'],
['androidx.camera:camera-core:1.0.0-alpha06',
'0096cabe539d9b4288f406acfb44264b137ebd600e38e33504ff425c979016c9'],
['androidx.camera:camera-core:1.0.0-beta01',
'acba5f196b3bd3ecf756067c9f4fae4f15b81228c2bd6b52212d9454e27599f0'],
['androidx.camera:camera-lifecycle:1.0.0-beta01',
'25c761a863555d5e008e428cf271caca774c867bf269495dc32a0fdc65770fd5'],
['androidx.cardview:cardview:1.0.0',
'1193c04c22a3d6b5946dae9f4e8c59d6adde6a71b6bd5d87fb99d82dda1afec7'],
@ -36,8 +42,8 @@ dependencyVerification {
['androidx.collection:collection:1.1.0',
'632a0e5407461de774409352940e292a291037724207a787820c77daf7d33b72'],
['androidx.concurrent:concurrent-futures:1.0.0-alpha03',
'50812a53912255e3e0f2147d13bbbb81937c3726fda2e984e77a27c7207d96a1'],
['androidx.concurrent:concurrent-futures:1.0.0',
'5595a40e278a7b39fa78a09490e3d7f3faa95c7b01447148bd38b5ade0605c35'],
['androidx.constraintlayout:constraintlayout-solver:1.1.3',
'965c177e64fbd81bd1d27b402b66ef9d7bc7b5cb5f718044bf7a453abc542045'],