Update CameraX to 1.0.0-beta11.

Alex Hart
2020-11-09 09:21:59 -04:00
committed by Cody Henthorne
parent f4c1e34402
commit c4c32d80b2
19 changed files with 267 additions and 2521 deletions

View File

@@ -318,9 +318,10 @@ dependencies {
implementation 'androidx.lifecycle:lifecycle-extensions:2.1.0'
implementation 'androidx.lifecycle:lifecycle-viewmodel-savedstate:1.0.0-alpha05'
implementation 'androidx.lifecycle:lifecycle-common-java8:2.1.0'
implementation "androidx.camera:camera-core:1.0.0-beta01"
implementation "androidx.camera:camera-camera2:1.0.0-beta01"
implementation "androidx.camera:camera-lifecycle:1.0.0-beta01"
implementation "androidx.camera:camera-core:1.0.0-beta11"
implementation "androidx.camera:camera-camera2:1.0.0-beta11"
implementation "androidx.camera:camera-lifecycle:1.0.0-beta11"
implementation "androidx.camera:camera-view:1.0.0-alpha18"
implementation "androidx.concurrent:concurrent-futures:1.0.0"
implementation "androidx.autofill:autofill:1.0.0"
implementation "androidx.paging:paging-common:2.1.2"

View File

@@ -3,7 +3,7 @@
xmlns:tools="http://schemas.android.com/tools"
package="org.thoughtcrime.securesms">
<uses-sdk tools:overrideLibrary="androidx.camera.core,androidx.camera.camera2,androidx.camera.lifecycle" />
<uses-sdk tools:overrideLibrary="androidx.camera.core,androidx.camera.camera2,androidx.camera.lifecycle,androidx.camera.view" />
<permission android:name="${applicationId}.ACCESS_SECRETS"
android:label="Access to TextSecure Secrets"

View File

@@ -14,7 +14,7 @@
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
package androidx.camera.view;
import android.Manifest.permission;
import android.annotation.SuppressLint;
@@ -45,43 +45,44 @@ import androidx.annotation.RestrictTo;
import androidx.annotation.RestrictTo.Scope;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.DisplayOrientedMeteringPointFactory;
import androidx.camera.core.FocusMeteringAction;
import androidx.camera.core.FocusMeteringResult;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCapture.OnImageCapturedCallback;
import androidx.camera.core.ImageCapture.OnImageSavedCallback;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Logger;
import androidx.camera.core.MeteringPoint;
import androidx.camera.core.MeteringPointFactory;
import androidx.camera.core.VideoCapture;
import androidx.camera.core.VideoCapture.OnVideoSavedCallback;
import androidx.camera.core.impl.LensFacingConverter;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.impl.utils.futures.FutureCallback;
import androidx.camera.core.impl.utils.futures.Futures;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.LiveData;
import com.google.common.util.concurrent.ListenableFuture;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.logging.Log;
import java.io.FileDescriptor;
import java.io.File;
import java.util.concurrent.Executor;
/**
* A {@link View} that displays a preview of the camera with methods {@link
* #takePicture(Executor, OnImageCapturedCallback)},
* {@link #startRecording(FileDescriptor, Executor, VideoCapture.OnVideoSavedCallback)} and {@link #stopRecording()}.
* {@link #takePicture(ImageCapture.OutputFileOptions, Executor, OnImageSavedCallback)},
* {@link #startRecording(VideoCapture.OutputFileOptions, Executor, OnVideoSavedCallback)}
* and {@link #stopRecording()}.
*
* <p>Because the Camera is a limited resource and consumes a high amount of power, CameraView must
* be opened/closed. CameraView will handle opening/closing automatically through use of a {@link
* LifecycleOwner}. Use {@link #bindToLifecycle(LifecycleOwner)} to start the camera.
*/
// Begin Signal Custom Code Block
@RequiresApi(21)
@SuppressLint("RestrictedApi")
// End Signal Custom Code Block
public final class CameraXView extends FrameLayout {
static final String TAG = CameraXView.class.getSimpleName();
static final boolean DEBUG = false;
public final class SignalCameraView extends FrameLayout {
static final String TAG = SignalCameraView.class.getSimpleName();
static final int INDEFINITE_VIDEO_DURATION = -1;
static final int INDEFINITE_VIDEO_SIZE = -1;
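As a rough usage sketch (cameraView and R.id.camera are placeholder names, not from this diff), a hosting Fragment drives the renamed view the same way it drove CameraXView:

SignalCameraView cameraView = view.findViewById(R.id.camera);
cameraView.bindToLifecycle(getViewLifecycleOwner());         // camera opens/closes with the lifecycle
cameraView.setScaleType(PreviewView.ScaleType.FILL_CENTER);  // delegates to the embedded PreviewView
cameraView.setCameraLensFacing(CameraSelector.LENS_FACING_BACK);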
@@ -107,7 +108,7 @@ public final class CameraXView extends FrameLayout {
// For pinch-to-zoom
private PinchToZoomGestureDetector mPinchToZoomGestureDetector;
private boolean mIsPinchToZoomEnabled = true;
CameraXModule mCameraModule;
SignalCameraXModule mCameraModule;
private final DisplayManager.DisplayListener mDisplayListener =
new DisplayListener() {
@Override
@@ -124,26 +125,25 @@ public final class CameraXView extends FrameLayout {
}
};
private PreviewView mPreviewView;
private ScaleType mScaleType = ScaleType.CENTER_CROP;
// For accessibility event
private MotionEvent mUpEvent;
public CameraXView(@NonNull Context context) {
public SignalCameraView(@NonNull Context context) {
this(context, null);
}
public CameraXView(@NonNull Context context, @Nullable AttributeSet attrs) {
public SignalCameraView(@NonNull Context context, @Nullable AttributeSet attrs) {
this(context, attrs, 0);
}
public CameraXView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyle) {
public SignalCameraView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context, attrs);
}
@RequiresApi(21)
public CameraXView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr,
int defStyleRes) {
public SignalCameraView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr,
int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(context, attrs);
}
@@ -172,23 +172,23 @@ public final class CameraXView extends FrameLayout {
private void init(Context context, @Nullable AttributeSet attrs) {
addView(mPreviewView = new PreviewView(getContext()), 0 /* view position */);
mCameraModule = new CameraXModule(this);
mCameraModule = new SignalCameraXModule(this);
if (attrs != null) {
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraXView);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraView);
setScaleType(
ScaleType.fromId(
a.getInteger(R.styleable.CameraXView_scaleType,
PreviewView.ScaleType.fromId(
a.getInteger(R.styleable.CameraView_scaleType,
getScaleType().getId())));
setPinchToZoomEnabled(
a.getBoolean(
R.styleable.CameraXView_pinchToZoomEnabled, isPinchToZoomEnabled()));
R.styleable.CameraView_pinchToZoomEnabled, isPinchToZoomEnabled()));
setCaptureMode(
CaptureMode.fromId(
a.getInteger(R.styleable.CameraXView_captureMode,
a.getInteger(R.styleable.CameraView_captureMode,
getCaptureMode().getId())));
int lensFacing = a.getInt(R.styleable.CameraXView_lensFacing, LENS_FACING_BACK);
int lensFacing = a.getInt(R.styleable.CameraView_lensFacing, LENS_FACING_BACK);
switch (lensFacing) {
case LENS_FACING_NONE:
setCameraLensFacing(null);
@@ -203,7 +203,7 @@ public final class CameraXView extends FrameLayout {
// Unhandled event.
}
int flashMode = a.getInt(R.styleable.CameraXView_flash, 0);
int flashMode = a.getInt(R.styleable.CameraView_flash, 0);
switch (flashMode) {
case FLASH_MODE_AUTO:
setFlash(ImageCapture.FLASH_MODE_AUTO);
@@ -265,7 +265,7 @@ public final class CameraXView extends FrameLayout {
if (savedState instanceof Bundle) {
Bundle state = (Bundle) savedState;
super.onRestoreInstanceState(state.getParcelable(EXTRA_SUPER));
setScaleType(ScaleType.fromId(state.getInt(EXTRA_SCALE_TYPE)));
setScaleType(PreviewView.ScaleType.fromId(state.getInt(EXTRA_SCALE_TYPE)));
setZoomRatio(state.getFloat(EXTRA_ZOOM_RATIO));
setPinchToZoomEnabled(state.getBoolean(EXTRA_PINCH_TO_ZOOM_ENABLED));
setFlash(FlashModeConverter.valueOf(state.getString(EXTRA_FLASH)));
@@ -298,6 +298,21 @@ public final class CameraXView extends FrameLayout {
dpyMgr.unregisterDisplayListener(mDisplayListener);
}
/**
* Gets the {@link LiveData} of the underlying {@link PreviewView}'s
* {@link PreviewView.StreamState}.
*
* @return A {@link LiveData} containing the {@link PreviewView.StreamState}. Apps can either
* get the current value by {@link LiveData#getValue()} or register an observer by
* {@link LiveData#observe}.
* @see PreviewView#getPreviewStreamState()
*/
@NonNull
public LiveData<PreviewView.StreamState> getPreviewStreamState() {
return mPreviewView.getPreviewStreamState();
}
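Since getPreviewStreamState() simply surfaces the underlying PreviewView LiveData, observing it from a host looks roughly like this (a sketch; cameraView and the lifecycle owner are placeholders):

cameraView.getPreviewStreamState().observe(getViewLifecycleOwner(), state -> {
  if (state == PreviewView.StreamState.STREAMING) {
    // First preview frame has been rendered; safe to hide any placeholder UI.
  }
});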
@NonNull
PreviewView getPreviewView() {
return mPreviewView;
}
@@ -347,11 +362,11 @@ public final class CameraXView extends FrameLayout {
/**
* Returns the scale type used to scale the preview.
*
* @return The current {@link ScaleType}.
* @return The current {@link PreviewView.ScaleType}.
*/
@NonNull
public ScaleType getScaleType() {
return mScaleType;
public PreviewView.ScaleType getScaleType() {
return mPreviewView.getScaleType();
}
/**
@@ -359,13 +374,10 @@ public final class CameraXView extends FrameLayout {
*
* <p>This controls how the view finder should be scaled and positioned within the view.
*
* @param scaleType The desired {@link ScaleType}.
* @param scaleType The desired {@link PreviewView.ScaleType}.
*/
public void setScaleType(@NonNull ScaleType scaleType) {
if (scaleType != mScaleType) {
mScaleType = scaleType;
requestLayout();
}
public void setScaleType(@NonNull PreviewView.ScaleType scaleType) {
mPreviewView.setScaleType(scaleType);
}
/**
@@ -401,8 +413,10 @@ public final class CameraXView extends FrameLayout {
}
/**
* Sets the maximum video duration before {@link VideoCapture.OnVideoSavedCallback#onVideoSaved(FileDescriptor)} is
* called automatically. Use {@link #INDEFINITE_VIDEO_DURATION} to disable the timeout.
* Sets the maximum video duration before
* {@link OnVideoSavedCallback#onVideoSaved(VideoCapture.OutputFileResults)} is called
* automatically.
* Use {@link #INDEFINITE_VIDEO_DURATION} to disable the timeout.
*/
private void setMaxVideoDuration(long duration) {
mCameraModule.setMaxVideoDuration(duration);
@@ -417,7 +431,8 @@ public final class CameraXView extends FrameLayout {
}
/**
* Sets the maximum video size in bytes before {@link VideoCapture.OnVideoSavedCallback#onVideoSaved(FileDescriptor)}
* Sets the maximum video size in bytes before
* {@link OnVideoSavedCallback#onVideoSaved(VideoCapture.OutputFileResults)}
* is called automatically. Use {@link #INDEFINITE_VIDEO_SIZE} to disable the size restriction.
*/
private void setMaxVideoSize(long size) {
@@ -435,28 +450,38 @@ public final class CameraXView extends FrameLayout {
mCameraModule.takePicture(executor, callback);
}
/**
* Takes a picture and calls
* {@link OnImageSavedCallback#onImageSaved(ImageCapture.OutputFileResults)} when done.
*
* <p> The value of {@link ImageCapture.Metadata#isReversedHorizontal()} in the
* {@link ImageCapture.OutputFileOptions} will be overwritten based on camera direction. For
* front camera, it will be set to true; for back camera, it will be set to false.
*
* @param outputFileOptions Options to store the newly captured image.
* @param executor The executor in which the callback methods will be run.
* @param callback Callback which will receive success or failure.
*/
public void takePicture(@NonNull ImageCapture.OutputFileOptions outputFileOptions,
@NonNull Executor executor,
@NonNull OnImageSavedCallback callback) {
mCameraModule.takePicture(outputFileOptions, executor, callback);
}
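A minimal sketch of calling the new overload from app code (the output file, executor and callback are placeholders); the reversed-horizontal metadata is filled in as described in the Javadoc above:

ImageCapture.OutputFileOptions options =
    new ImageCapture.OutputFileOptions.Builder(new File(context.getCacheDir(), "capture.jpg")).build();
cameraView.takePicture(options, ContextCompat.getMainExecutor(context),
    new ImageCapture.OnImageSavedCallback() {
      @Override public void onImageSaved(@NonNull ImageCapture.OutputFileResults results) {
        // results.getSavedUri() may be null when writing to a plain File.
      }

      @Override public void onError(@NonNull ImageCaptureException exception) {
        // Surface the failure to the caller.
      }
    });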
/**
* Takes a video and calls the OnVideoSavedCallback when done.
*
* @param file The destination.
* @param executor The executor in which the callback methods will be run.
* @param callback Callback which will receive success or failure.
* @param outputFileOptions Options to store the newly captured video.
* @param executor The executor in which the callback methods will be run.
* @param callback Callback which will receive success or failure.
*/
// Begin Signal Custom Code Block
@RequiresApi(26)
// End Signal Custom Code Block
public void startRecording(// Begin Signal Custom Code Block
@NonNull FileDescriptor file,
// End Signal Custom Code Block
public void startRecording(@NonNull VideoCapture.OutputFileOptions outputFileOptions,
@NonNull Executor executor,
@NonNull VideoCapture.OnVideoSavedCallback callback) {
mCameraModule.startRecording(file, executor, callback);
@NonNull OnVideoSavedCallback callback) {
mCameraModule.startRecording(outputFileOptions, executor, callback);
}
/** Stops an in-progress video recording. */
// Begin Signal Custom Code Block
@RequiresApi(26)
// End Signal Custom Code Block
public void stopRecording() {
mCameraModule.stopRecording();
}
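The matching video path, sketched under the same assumptions (the FileDescriptor-based Builder mirrors the call CameraXVideoCaptureHelper makes later in this diff; fd is a placeholder):

VideoCapture.OutputFileOptions options =
    new VideoCapture.OutputFileOptions.Builder(fd).build();
cameraView.startRecording(options, ContextCompat.getMainExecutor(context),
    new VideoCapture.OnVideoSavedCallback() {
      @Override public void onVideoSaved(@NonNull VideoCapture.OutputFileResults results) {
        // Recording finished and flushed to the descriptor.
      }

      @Override public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
        // Recording failed or was interrupted.
      }
    });
// ...and later, e.g. when the capture button is released:
cameraView.stopRecording();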
@@ -554,7 +579,8 @@ public final class CameraXView extends FrameLayout {
mDownEventTimestamp = System.currentTimeMillis();
break;
case MotionEvent.ACTION_UP:
if (delta() < ViewConfiguration.getLongPressTimeout()) {
if (delta() < ViewConfiguration.getLongPressTimeout()
&& mCameraModule.isBoundToLifecycle()) {
mUpEvent = event;
performClick();
}
@@ -578,19 +604,14 @@ public final class CameraXView extends FrameLayout {
final float y = (mUpEvent != null) ? mUpEvent.getY() : getY() + getHeight() / 2f;
mUpEvent = null;
CameraSelector cameraSelector =
new CameraSelector.Builder().requireLensFacing(
mCameraModule.getLensFacing()).build();
DisplayOrientedMeteringPointFactory pointFactory = new DisplayOrientedMeteringPointFactory(
getDisplay(), cameraSelector, mPreviewView.getWidth(), mPreviewView.getHeight());
float afPointWidth = 1.0f / 6.0f; // 1/6 total area
float aePointWidth = afPointWidth * 1.5f;
MeteringPoint afPoint = pointFactory.createPoint(x, y, afPointWidth);
MeteringPoint aePoint = pointFactory.createPoint(x, y, aePointWidth);
Camera camera = mCameraModule.getCamera();
if (camera != null) {
MeteringPointFactory pointFactory = mPreviewView.getMeteringPointFactory();
float afPointWidth = 1.0f / 6.0f; // 1/6 total area
float aePointWidth = afPointWidth * 1.5f;
MeteringPoint afPoint = pointFactory.createPoint(x, y, afPointWidth);
MeteringPoint aePoint = pointFactory.createPoint(x, y, aePointWidth);
ListenableFuture<FocusMeteringResult> future =
camera.getCameraControl().startFocusAndMetering(
new FocusMeteringAction.Builder(afPoint,
@@ -609,7 +630,7 @@ public final class CameraXView extends FrameLayout {
}, CameraXExecutors.directExecutor());
} else {
Log.d(TAG, "cannot access camera");
Logger.d(TAG, "cannot access camera");
}
return true;
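The hunk above is cut off mid-builder; a complete version of the focus/metering call built from PreviewView's factory looks roughly like the following (a sketch reusing the field names above):

MeteringPointFactory pointFactory = mPreviewView.getMeteringPointFactory();
float afPointWidth = 1.0f / 6.0f;                        // AF region covers ~1/6 of the preview
MeteringPoint afPoint = pointFactory.createPoint(x, y, afPointWidth);
MeteringPoint aePoint = pointFactory.createPoint(x, y, afPointWidth * 1.5f);
FocusMeteringAction action =
    new FocusMeteringAction.Builder(afPoint, FocusMeteringAction.FLAG_AF)
        .addPoint(aePoint, FocusMeteringAction.FLAG_AE)
        .build();
ListenableFuture<FocusMeteringResult> future =
    camera.getCameraControl().startFocusAndMetering(action);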
@@ -711,45 +732,11 @@ public final class CameraXView extends FrameLayout {
return mCameraModule.isTorchOn();
}
/** Options for scaling the bounds of the view finder to the bounds of this view. */
public enum ScaleType {
/**
* Scale the view finder, maintaining the source aspect ratio, so the view finder fills the
* entire view. This will cause the view finder to crop the source image if the camera
* aspect ratio does not match the view aspect ratio.
*/
CENTER_CROP(0),
/**
* Scale the view finder, maintaining the source aspect ratio, so the view finder is
* entirely contained within the view.
*/
CENTER_INSIDE(1);
private final int mId;
int getId() {
return mId;
}
ScaleType(int id) {
mId = id;
}
static ScaleType fromId(int id) {
for (ScaleType st : values()) {
if (st.mId == id) {
return st;
}
}
throw new IllegalArgumentException();
}
}
/**
* The capture mode used by CameraView.
*
* <p>This enum can be used to determine which capture mode will be enabled for {@link
* CameraXView}.
* SignalCameraView}.
*/
public enum CaptureMode {
/** A mode where image capture is enabled. */
@@ -832,4 +819,4 @@ public final class CameraXView extends FrameLayout {
public void onScaleEnd(ScaleGestureDetector detector) {
}
}
}
}

View File

@@ -14,7 +14,9 @@
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
package androidx.camera.view;
import static androidx.camera.core.ImageCapture.FLASH_MODE_OFF;
import android.Manifest.permission;
import android.annotation.SuppressLint;
@@ -27,17 +29,21 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCapture.OnImageCapturedCallback;
import androidx.camera.core.ImageCapture.OnImageSavedCallback;
import androidx.camera.core.Logger;
import androidx.camera.core.Preview;
import androidx.camera.core.TorchState;
import androidx.camera.core.UseCase;
import androidx.camera.core.VideoCapture;
import androidx.camera.core.VideoCapture.OnVideoSavedCallback;
import androidx.camera.core.impl.CameraInternal;
import androidx.camera.core.impl.LensFacingConverter;
import androidx.camera.core.impl.VideoCaptureConfig;
import androidx.camera.core.impl.utils.CameraOrientationUtil;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.impl.utils.futures.FutureCallback;
@@ -51,11 +57,10 @@ import androidx.lifecycle.OnLifecycleEvent;
import com.google.common.util.concurrent.ListenableFuture;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.FileDescriptor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
@@ -65,13 +70,10 @@ import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import static androidx.camera.core.ImageCapture.FLASH_MODE_OFF;
/** CameraX use case operation built on {@link androidx.camera.core}. */
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
final class CameraXModule {
@SuppressLint("RestrictedApi")
final class SignalCameraXModule {
public static final String TAG = "CameraXModule";
private static final float UNITY_ZOOM_SCALE = 1f;
@@ -82,13 +84,13 @@ final class CameraXModule {
private static final Rational ASPECT_RATIO_3_4 = new Rational(3, 4);
private final Preview.Builder mPreviewBuilder;
private final VideoCaptureConfig.Builder mVideoCaptureConfigBuilder;
private final VideoCapture.Builder mVideoCaptureBuilder;
private final ImageCapture.Builder mImageCaptureBuilder;
private final CameraXView mCameraXView;
private final SignalCameraView mCameraView;
final AtomicBoolean mVideoIsRecording = new AtomicBoolean(false);
private CameraXView.CaptureMode mCaptureMode = CameraXView.CaptureMode.IMAGE;
private long mMaxVideoDuration = CameraXView.INDEFINITE_VIDEO_DURATION;
private long mMaxVideoSize = CameraXView.INDEFINITE_VIDEO_SIZE;
private SignalCameraView.CaptureMode mCaptureMode = SignalCameraView.CaptureMode.IMAGE;
private long mMaxVideoDuration = SignalCameraView.INDEFINITE_VIDEO_DURATION;
private long mMaxVideoSize = SignalCameraView.INDEFINITE_VIDEO_SIZE;
@ImageCapture.FlashMode
private int mFlash = FLASH_MODE_OFF;
@Nullable
@@ -110,7 +112,6 @@ final class CameraXModule {
public void onDestroy(LifecycleOwner owner) {
if (owner == mCurrentLifecycle) {
clearCurrentLifecycle();
mPreview.setSurfaceProvider(null);
}
}
};
@@ -123,8 +124,8 @@ final class CameraXModule {
@Nullable
ProcessCameraProvider mCameraProvider;
CameraXModule(CameraXView view) {
mCameraXView = view;
SignalCameraXModule(SignalCameraView view) {
mCameraView = view;
Futures.addCallback(ProcessCameraProvider.getInstance(view.getContext()),
new FutureCallback<ProcessCameraProvider>() {
@@ -149,14 +150,12 @@ final class CameraXModule {
mImageCaptureBuilder = new ImageCapture.Builder().setTargetName("ImageCapture");
// Begin Signal Custom Code Block
mVideoCaptureConfigBuilder =
new VideoCaptureConfig.Builder().setTargetName("VideoCapture")
.setAudioBitRate(VideoUtil.AUDIO_BIT_RATE)
.setVideoFrameRate(VideoUtil.VIDEO_FRAME_RATE)
.setBitRate(VideoUtil.VIDEO_BIT_RATE);
// End Signal Custom Code Block
mVideoCaptureBuilder = new VideoCapture.Builder().setTargetName("VideoCapture")
.setAudioBitRate(VideoUtil.AUDIO_BIT_RATE)
.setVideoFrameRate(VideoUtil.VIDEO_FRAME_RATE)
.setBitRate(VideoUtil.VIDEO_BIT_RATE);
}
@RequiresPermission(permission.CAMERA)
void bindToLifecycle(LifecycleOwner lifecycleOwner) {
mNewLifecycle = lifecycleOwner;
@@ -173,12 +172,15 @@ final class CameraXModule {
}
clearCurrentLifecycle();
if (mNewLifecycle.getLifecycle().getCurrentState() == Lifecycle.State.DESTROYED) {
// Lifecycle is already in a destroyed state. Since it may have been a valid
// lifecycle when bound, but became destroyed while waiting for layout, treat this as
// a no-op now that we have cleared the previous lifecycle.
mNewLifecycle = null;
return;
}
mCurrentLifecycle = mNewLifecycle;
mNewLifecycle = null;
if (mCurrentLifecycle.getLifecycle().getCurrentState() == Lifecycle.State.DESTROYED) {
mCurrentLifecycle = null;
throw new IllegalArgumentException("Cannot bind to lifecycle in a destroyed state.");
}
if (mCameraProvider == null) {
// try again once the camera provider is no longer null
@@ -188,18 +190,18 @@ final class CameraXModule {
Set<Integer> available = getAvailableCameraLensFacing();
if (available.isEmpty()) {
Log.w(TAG, "Unable to bindToLifeCycle since no cameras available");
Logger.w(TAG, "Unable to bindToLifeCycle since no cameras available");
mCameraLensFacing = null;
}
// Ensure the current camera exists, or default to another camera
if (mCameraLensFacing != null && !available.contains(mCameraLensFacing)) {
Log.w(TAG, "Camera does not exist with direction " + mCameraLensFacing);
Logger.w(TAG, "Camera does not exist with direction " + mCameraLensFacing);
// Default to the first available camera direction
mCameraLensFacing = available.iterator().next();
Log.w(TAG, "Defaulting to primary camera with direction " + mCameraLensFacing);
Logger.w(TAG, "Defaulting to primary camera with direction " + mCameraLensFacing);
}
// Do not attempt to create use cases for a null cameraLensFacing. This could occur if
@@ -216,14 +218,12 @@ final class CameraXModule {
boolean isDisplayPortrait = getDisplayRotationDegrees() == 0
|| getDisplayRotationDegrees() == 180;
Rational targetAspectRatio;
// Begin Signal Custom Code Block
int resolution = CameraXUtil.getIdealResolution(Resources.getSystem().getDisplayMetrics().widthPixels, Resources.getSystem().getDisplayMetrics().heightPixels);
// End Signal Custom Code Block
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
// mImageCaptureBuilder.setTargetAspectRatio(AspectRatio.RATIO_4_3);
Rational targetAspectRatio;
if (getCaptureMode() == SignalCameraView.CaptureMode.IMAGE) {
// Begin Signal Custom Code Block
mImageCaptureBuilder.setTargetResolution(CameraXUtil.buildResolutionForRatio(resolution, ASPECT_RATIO_4_3, isDisplayPortrait));
// End Signal Custom Code Block
@@ -232,7 +232,6 @@ final class CameraXModule {
// Begin Signal Custom Code Block
mImageCaptureBuilder.setTargetResolution(CameraXUtil.buildResolutionForRatio(resolution, ASPECT_RATIO_16_9, isDisplayPortrait));
// End Signal Custom Code Block
// mImageCaptureBuilder.setTargetAspectRatio(AspectRatio.RATIO_16_9);
targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_9_16 : ASPECT_RATIO_16_9;
}
@@ -245,15 +244,14 @@ final class CameraXModule {
// Begin Signal Custom Code Block
Size size = VideoUtil.getVideoRecordingSize();
mVideoCaptureConfigBuilder.setTargetResolution(size);
mVideoCaptureConfigBuilder.setMaxResolution(size);
mVideoCaptureBuilder.setTargetResolution(size);
mVideoCaptureBuilder.setMaxResolution(size);
// End Signal Custom Code Block
mVideoCaptureConfigBuilder.setTargetRotation(getDisplaySurfaceRotation());
mVideoCaptureBuilder.setTargetRotation(getDisplaySurfaceRotation());
// Begin Signal Custom Code Block
if (MediaConstraints.isVideoTranscodeAvailable()) {
mVideoCapture = new VideoCapture(mVideoCaptureConfigBuilder.getUseCaseConfig());
mVideoCapture = mVideoCaptureBuilder.build();
}
// End Signal Custom Code Block
@@ -262,15 +260,15 @@ final class CameraXModule {
mPreviewBuilder.setTargetResolution(new Size(getMeasuredWidth(), height));
mPreview = mPreviewBuilder.build();
mPreview.setSurfaceProvider(mCameraXView.getPreviewView().getPreviewSurfaceProvider());
mPreview.setSurfaceProvider(mCameraView.getPreviewView().getSurfaceProvider());
CameraSelector cameraSelector =
new CameraSelector.Builder().requireLensFacing(mCameraLensFacing).build();
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
if (getCaptureMode() == SignalCameraView.CaptureMode.IMAGE) {
mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector,
mImageCapture,
mPreview);
} else if (getCaptureMode() == CameraXView.CaptureMode.VIDEO) {
} else if (getCaptureMode() == SignalCameraView.CaptureMode.VIDEO) {
mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector,
mVideoCapture,
mPreview);
@@ -301,7 +299,7 @@ final class CameraXModule {
return;
}
if (getCaptureMode() == CameraXView.CaptureMode.VIDEO) {
if (getCaptureMode() == SignalCameraView.CaptureMode.VIDEO) {
throw new IllegalStateException("Can not take picture under VIDEO capture mode.");
}
@@ -312,17 +310,32 @@ final class CameraXModule {
mImageCapture.takePicture(executor, callback);
}
// Begin Signal Custom Code Block
@RequiresApi(26)
public void startRecording(FileDescriptor file,
// End Signal Custom Code Block
Executor executor,
final VideoCapture.OnVideoSavedCallback callback) {
public void takePicture(@NonNull ImageCapture.OutputFileOptions outputFileOptions,
@NonNull Executor executor, OnImageSavedCallback callback) {
if (mImageCapture == null) {
return;
}
if (getCaptureMode() == SignalCameraView.CaptureMode.VIDEO) {
throw new IllegalStateException("Can not take picture under VIDEO capture mode.");
}
if (callback == null) {
throw new IllegalArgumentException("OnImageSavedCallback should not be empty");
}
outputFileOptions.getMetadata().setReversedHorizontal(mCameraLensFacing != null
&& mCameraLensFacing == CameraSelector.LENS_FACING_FRONT);
mImageCapture.takePicture(outputFileOptions, executor, callback);
}
public void startRecording(VideoCapture.OutputFileOptions outputFileOptions,
Executor executor, final OnVideoSavedCallback callback) {
if (mVideoCapture == null) {
return;
}
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
if (getCaptureMode() == SignalCameraView.CaptureMode.IMAGE) {
throw new IllegalStateException("Can not record video under IMAGE capture mode.");
}
@@ -332,15 +345,14 @@ final class CameraXModule {
mVideoIsRecording.set(true);
mVideoCapture.startRecording(
file,
outputFileOptions,
executor,
new VideoCapture.OnVideoSavedCallback() {
@Override
// Begin Signal Custom Code Block
public void onVideoSaved(@NonNull FileDescriptor savedFile) {
// End Signal Custom Code Block
public void onVideoSaved(
@NonNull VideoCapture.OutputFileResults outputFileResults) {
mVideoIsRecording.set(false);
callback.onVideoSaved(savedFile);
callback.onVideoSaved(outputFileResults);
}
@Override
@@ -349,15 +361,12 @@ final class CameraXModule {
@NonNull String message,
@Nullable Throwable cause) {
mVideoIsRecording.set(false);
Log.e(TAG, message, cause);
Logger.e(TAG, message, cause);
callback.onError(videoCaptureError, message, cause);
}
});
}
// Begin Signal Custom Code Block
@RequiresApi(26)
// End Signal Custom Code Block
public void stopRecording() {
if (mVideoCapture == null) {
return;
@@ -388,14 +397,15 @@ final class CameraXModule {
@RequiresPermission(permission.CAMERA)
public boolean hasCameraWithLensFacing(@CameraSelector.LensFacing int lensFacing) {
String cameraId;
try {
cameraId = CameraX.getCameraWithLensFacing(lensFacing);
} catch (Exception e) {
throw new IllegalStateException("Unable to query lens facing.", e);
if (mCameraProvider == null) {
return false;
}
try {
return mCameraProvider.hasCamera(
new CameraSelector.Builder().requireLensFacing(lensFacing).build());
} catch (CameraInfoUnavailableException e) {
return false;
}
return cameraId != null;
}
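A sketch of the provider-based check that replaces the removed CameraX.getCameraWithLensFacing() lookup (provider stands in for the mCameraProvider field above):

try {
  boolean hasFront = provider.hasCamera(CameraSelector.DEFAULT_FRONT_CAMERA);
  boolean hasBack  = provider.hasCamera(CameraSelector.DEFAULT_BACK_CAMERA);
  // e.g. only show the flip button when both directions are available.
} catch (CameraInfoUnavailableException e) {
  // Treat an unavailable camera service as "no camera".
}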
@Nullable
@@ -454,7 +464,7 @@ final class CameraXModule {
}
}, CameraXExecutors.directExecutor());
} else {
Log.e(TAG, "Failed to set zoom ratio");
Logger.e(TAG, "Failed to set zoom ratio");
}
}
@@ -486,6 +496,10 @@ final class CameraXModule {
}
}
boolean isBoundToLifecycle() {
return mCamera != null;
}
int getRelativeCameraOrientation(boolean compensateForMirroring) {
int rotationDegrees = 0;
if (mCamera != null) {
@@ -520,6 +534,11 @@ final class CameraXModule {
if (!toUnbind.isEmpty()) {
mCameraProvider.unbind(toUnbind.toArray((new UseCase[0])));
}
// Remove surface provider once unbound.
if (mPreview != null) {
mPreview.setSurfaceProvider(null);
}
}
mCamera = null;
mCurrentLifecycle = null;
@@ -532,7 +551,7 @@ final class CameraXModule {
mImageCapture.setTargetRotation(getDisplaySurfaceRotation());
}
if (mVideoCapture != null && MediaConstraints.isVideoTranscodeAvailable()) {
if (mVideoCapture != null) {
mVideoCapture.setTargetRotation(getDisplaySurfaceRotation());
}
}
@@ -567,7 +586,7 @@ final class CameraXModule {
return false;
}
CameraInternal camera = mImageCapture.getBoundCamera();
CameraInternal camera = mImageCapture.getCamera();
if (camera == null) {
return false;
@@ -614,15 +633,15 @@ final class CameraXModule {
}
public Context getContext() {
return mCameraXView.getContext();
return mCameraView.getContext();
}
public int getWidth() {
return mCameraXView.getWidth();
return mCameraView.getWidth();
}
public int getHeight() {
return mCameraXView.getHeight();
return mCameraView.getHeight();
}
public int getDisplayRotationDegrees() {
@@ -630,15 +649,15 @@ final class CameraXModule {
}
protected int getDisplaySurfaceRotation() {
return mCameraXView.getDisplaySurfaceRotation();
return mCameraView.getDisplaySurfaceRotation();
}
private int getMeasuredWidth() {
return mCameraXView.getMeasuredWidth();
return mCameraView.getMeasuredWidth();
}
private int getMeasuredHeight() {
return mCameraXView.getMeasuredHeight();
return mCameraView.getMeasuredHeight();
}
@Nullable
@@ -647,11 +666,11 @@ final class CameraXModule {
}
@NonNull
public CameraXView.CaptureMode getCaptureMode() {
public SignalCameraView.CaptureMode getCaptureMode() {
return mCaptureMode;
}
public void setCaptureMode(@NonNull CameraXView.CaptureMode captureMode) {
public void setCaptureMode(@NonNull SignalCameraView.CaptureMode captureMode) {
this.mCaptureMode = captureMode;
rebindToLifecycle();
}

View File

@@ -26,6 +26,10 @@ import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.camera.view.SignalCameraView;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.ViewModelProviders;
@@ -38,13 +42,13 @@ import org.thoughtcrime.securesms.components.TooltipPopup;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXFlashToggleView;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView;
import org.thoughtcrime.securesms.mms.DecryptableStreamUriLoader.DecryptableUri;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.util.Stopwatch;
import org.thoughtcrime.securesms.util.TextSecurePreferences;
import org.thoughtcrime.securesms.util.ThemeUtil;
import org.thoughtcrime.securesms.util.concurrent.SignalExecutors;
import org.thoughtcrime.securesms.util.concurrent.SimpleTask;
import org.thoughtcrime.securesms.video.VideoUtil;
import org.whispersystems.libsignal.util.guava.Optional;
@@ -62,7 +66,7 @@ public class CameraXFragment extends LoggingFragment implements CameraFragment {
private static final String TAG = Log.tag(CameraXFragment.class);
private static final String IS_VIDEO_ENABLED = "is_video_enabled";
private CameraXView camera;
private SignalCameraView camera;
private ViewGroup controlsContainer;
private Controller controller;
private MediaSendViewModel viewModel;
@@ -205,37 +209,11 @@ public class CameraXFragment extends LoggingFragment implements CameraFragment {
onCaptureClicked();
});
camera.setScaleType(CameraXView.ScaleType.CENTER_INSIDE);
camera.setScaleType(PreviewView.ScaleType.FILL_CENTER);
if (camera.hasCameraWithLensFacing(CameraSelector.LENS_FACING_FRONT) && camera.hasCameraWithLensFacing(CameraSelector.LENS_FACING_BACK)) {
flipButton.setVisibility(View.VISIBLE);
flipButton.setOnClickListener(v -> {
camera.toggleCamera();
TextSecurePreferences.setDirectCaptureCameraId(getContext(), CameraXUtil.toCameraDirectionInt(camera.getCameraLensFacing()));
Animation animation = new RotateAnimation(0, -180, RotateAnimation.RELATIVE_TO_SELF, 0.5f, RotateAnimation.RELATIVE_TO_SELF, 0.5f);
animation.setDuration(200);
animation.setInterpolator(new DecelerateInterpolator());
flipButton.startAnimation(animation);
flashButton.setAutoFlashEnabled(camera.hasFlash());
flashButton.setFlash(camera.getFlash());
});
GestureDetector gestureDetector = new GestureDetector(requireContext(), new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onDoubleTap(MotionEvent e) {
if (flipButton.isEnabled()) {
flipButton.performClick();
}
return true;
}
});
camera.setOnTouchListener((v, event) -> gestureDetector.onTouchEvent(event));
} else {
flipButton.setVisibility(View.GONE);
}
ProcessCameraProvider.getInstance(requireContext())
.addListener(() -> initializeFlipButton(flipButton, flashButton),
Executors.mainThreadExecutor());
flashButton.setAutoFlashEnabled(camera.hasFlash());
flashButton.setFlash(camera.getFlash());
@@ -252,7 +230,7 @@ public class CameraXFragment extends LoggingFragment implements CameraFragment {
Animation inAnimation = AnimationUtils.loadAnimation(requireContext(), R.anim.fade_in);
Animation outAnimation = AnimationUtils.loadAnimation(requireContext(), R.anim.fade_out);
camera.setCaptureMode(CameraXView.CaptureMode.MIXED);
camera.setCaptureMode(SignalCameraView.CaptureMode.MIXED);
int maxDuration = VideoUtil.getMaxVideoDurationInSeconds(requireContext(), viewModel.getMediaConstraints());
Log.d(TAG, "Max duration: " + maxDuration + " sec");
@@ -410,4 +388,36 @@ public class CameraXFragment extends LoggingFragment implements CameraFragment {
}
}
}
private void initializeFlipButton(@NonNull View flipButton, @NonNull CameraXFlashToggleView flashButton) {
if (camera.hasCameraWithLensFacing(CameraSelector.LENS_FACING_FRONT) && camera.hasCameraWithLensFacing(CameraSelector.LENS_FACING_BACK)) {
flipButton.setVisibility(View.VISIBLE);
flipButton.setOnClickListener(v -> {
camera.toggleCamera();
TextSecurePreferences.setDirectCaptureCameraId(getContext(), CameraXUtil.toCameraDirectionInt(camera.getCameraLensFacing()));
Animation animation = new RotateAnimation(0, -180, RotateAnimation.RELATIVE_TO_SELF, 0.5f, RotateAnimation.RELATIVE_TO_SELF, 0.5f);
animation.setDuration(200);
animation.setInterpolator(new DecelerateInterpolator());
flipButton.startAnimation(animation);
flashButton.setAutoFlashEnabled(camera.hasFlash());
flashButton.setFlash(camera.getFlash());
});
GestureDetector gestureDetector = new GestureDetector(requireContext(), new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onDoubleTap(MotionEvent e) {
if (flipButton.isEnabled()) {
flipButton.performClick();
}
return true;
}
});
camera.setOnTouchListener((v, event) -> gestureDetector.onTouchEvent(event));
} else {
flipButton.setVisibility(View.GONE);
}
}
}

View File

@@ -7,10 +7,8 @@ import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageCapture;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView;
import androidx.camera.view.SignalCameraView;
@RequiresApi(21)
final class CameraXSelfieFlashHelper {
@@ -19,15 +17,15 @@ final class CameraXSelfieFlashHelper {
private static final float MAX_SELFIE_FLASH_ALPHA = 0.75f;
private static final long SELFIE_FLASH_DURATION_MS = 250;
private final Window window;
private final CameraXView camera;
private final View selfieFlash;
private final Window window;
private final SignalCameraView camera;
private final View selfieFlash;
private float brightnessBeforeFlash;
private boolean inFlash;
CameraXSelfieFlashHelper(@NonNull Window window,
@NonNull CameraXView camera,
@NonNull SignalCameraView camera,
@NonNull View selfieFlash)
{
this.window = window;
@@ -69,7 +67,8 @@ final class CameraXSelfieFlashHelper {
Integer cameraLensFacing = camera.getCameraLensFacing();
return camera.getFlash() == ImageCapture.FLASH_MODE_ON &&
!camera.hasFlash() &&
cameraLensFacing != null && cameraLensFacing == CameraSelector.LENS_FACING_BACK;
!camera.hasFlash() &&
cameraLensFacing != null &&
cameraLensFacing == CameraSelector.LENS_FACING_FRONT;
}
}

View File

@@ -1,6 +1,7 @@
package org.thoughtcrime.securesms.mediasend;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.util.DisplayMetrics;
import android.util.Size;
@@ -11,6 +12,8 @@ import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.core.VideoCapture;
import androidx.camera.view.SignalCameraView;
import androidx.fragment.app.Fragment;
import com.bumptech.glide.util.Executors;
@@ -19,8 +22,6 @@ import com.nineoldandroids.animation.ValueAnimator;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView;
import org.thoughtcrime.securesms.mediasend.camerax.VideoCapture;
import org.thoughtcrime.securesms.permissions.Permissions;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.video.VideoUtil;
@@ -36,7 +37,7 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
private static final long VIDEO_SIZE = 10 * 1024 * 1024;
private final @NonNull Fragment fragment;
private final @NonNull CameraXView camera;
private final @NonNull SignalCameraView camera;
private final @NonNull Callback callback;
private final @NonNull MemoryFileDescriptor memoryFileDescriptor;
private final @NonNull ValueAnimator updateProgressAnimator;
@@ -46,12 +47,12 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
private final VideoCapture.OnVideoSavedCallback videoSavedListener = new VideoCapture.OnVideoSavedCallback() {
@Override
public void onVideoSaved(@NonNull FileDescriptor fileDescriptor) {
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
try {
isRecording = false;
camera.setZoomRatio(camera.getMinZoomRatio());
memoryFileDescriptor.seek(0);
callback.onVideoSaved(fileDescriptor);
callback.onVideoSaved(memoryFileDescriptor.getFileDescriptor());
} catch (IOException e) {
callback.onVideoError(e);
}
@@ -66,7 +67,7 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
CameraXVideoCaptureHelper(@NonNull Fragment fragment,
@NonNull CameraButtonView captureButton,
@NonNull CameraXView camera,
@NonNull SignalCameraView camera,
@NonNull MemoryFileDescriptor memoryFileDescriptor,
int maxVideoDurationSec,
@NonNull Callback callback)
@@ -113,11 +114,15 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
.execute();
}
@SuppressLint("RestrictedApi")
private void beginCameraRecording() {
this.camera.setZoomRatio(this.camera.getMinZoomRatio());
callback.onVideoRecordStarted();
shrinkCaptureArea();
camera.startRecording(memoryFileDescriptor.getFileDescriptor(), Executors.mainThreadExecutor(), videoSavedListener);
VideoCapture.OutputFileOptions options = new VideoCapture.OutputFileOptions.Builder(memoryFileDescriptor.getFileDescriptor()).build();
camera.startRecording(options, Executors.mainThreadExecutor(), videoSavedListener);
updateProgressAnimator.start();
}

View File

@@ -1,78 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import static androidx.camera.core.ImageCapture.FLASH_MODE_AUTO;
import static androidx.camera.core.ImageCapture.FLASH_MODE_OFF;
import static androidx.camera.core.ImageCapture.FLASH_MODE_ON;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.ImageCapture.FlashMode;
/**
* Helper class that defines certain enum-like methods for {@link FlashMode}
*/
final class FlashModeConverter {
private FlashModeConverter() {
}
/**
* Returns the {@link FlashMode} constant for the specified name
*
* @param name The name of the {@link FlashMode} to return
* @return The {@link FlashMode} constant for the specified name
*/
@FlashMode
public static int valueOf(@Nullable final String name) {
if (name == null) {
throw new NullPointerException("name cannot be null");
}
switch (name) {
case "AUTO":
return FLASH_MODE_AUTO;
case "ON":
return FLASH_MODE_ON;
case "OFF":
return FLASH_MODE_OFF;
default:
throw new IllegalArgumentException("Unknown flash mode name " + name);
}
}
/**
* Returns the name of the {@link FlashMode} constant, exactly as it is declared.
*
* @param flashMode A {@link FlashMode} constant
* @return The name of the {@link FlashMode} constant.
*/
@NonNull
public static String nameOf(@FlashMode final int flashMode) {
switch (flashMode) {
case FLASH_MODE_AUTO:
return "AUTO";
case FLASH_MODE_ON:
return "ON";
case FLASH_MODE_OFF:
return "OFF";
default:
throw new IllegalArgumentException("Unknown flash mode " + flashMode);
}
}
}

View File

@@ -1,273 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.content.Context;
import android.content.res.TypedArray;
import android.hardware.display.DisplayManager;
import android.os.Build;
import android.util.AttributeSet;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.core.Preview;
import org.thoughtcrime.securesms.R;
import java.util.concurrent.Executor;
/**
* Custom View that displays camera feed for CameraX's Preview use case.
*
* <p> This class manages the Surface lifecycle, as well as the preview aspect ratio and
* orientation. Internally, it uses either a {@link android.view.TextureView} or
* {@link android.view.SurfaceView} to display the camera feed.
*/
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
public class PreviewView extends FrameLayout {
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
Implementation mImplementation;
private ImplementationMode mImplementationMode;
private final DisplayManager.DisplayListener mDisplayListener =
new DisplayManager.DisplayListener() {
@Override
public void onDisplayAdded(int displayId) {
}
@Override
public void onDisplayRemoved(int displayId) {
}
@Override
public void onDisplayChanged(int displayId) {
mImplementation.onDisplayChanged();
}
};
public PreviewView(@NonNull Context context) {
this(context, null);
}
public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs) {
this(context, attrs, 0);
}
public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
this(context, attrs, defStyleAttr, 0);
}
public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr,
int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
final TypedArray attributes = context.getTheme().obtainStyledAttributes(attrs,
R.styleable.PreviewView, defStyleAttr, defStyleRes);
try {
final int implementationModeId = attributes.getInteger(
R.styleable.PreviewView_implementationMode,
ImplementationMode.TEXTURE_VIEW.getId());
mImplementationMode = ImplementationMode.fromId(implementationModeId);
} finally {
attributes.recycle();
}
setUp();
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
final DisplayManager displayManager =
(DisplayManager) getContext().getSystemService(Context.DISPLAY_SERVICE);
if (displayManager != null) {
displayManager.registerDisplayListener(mDisplayListener, getHandler());
}
}
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
final DisplayManager displayManager =
(DisplayManager) getContext().getSystemService(Context.DISPLAY_SERVICE);
if (displayManager != null) {
displayManager.unregisterDisplayListener(mDisplayListener);
}
}
private void setUp() {
removeAllViews();
switch (mImplementationMode) {
case SURFACE_VIEW:
mImplementation = new SurfaceViewImplementation();
break;
case TEXTURE_VIEW:
mImplementation = new TextureViewImplementation();
break;
default:
throw new IllegalStateException(
"Unsupported implementation mode " + mImplementationMode);
}
mImplementation.init(this);
}
/**
* Specifies the {@link ImplementationMode} to use for the preview.
*
* @param implementationMode <code>SURFACE_VIEW</code> if a {@link android.view.SurfaceView}
* should be used to display the camera feed, or
* <code>TEXTURE_VIEW</code> to use a {@link android.view.TextureView}
*/
public void setImplementationMode(@NonNull final ImplementationMode implementationMode) {
mImplementationMode = implementationMode;
setUp();
}
/**
* Returns the implementation mode of the {@link PreviewView}.
*
* @return <code>SURFACE_VIEW</code> if the {@link PreviewView} is internally using a
* {@link android.view.SurfaceView} to display the camera feed, or <code>TEXTURE_VIEW</code>
* if a {@link android.view.TextureView} is being used.
*/
@NonNull
public ImplementationMode getImplementationMode() {
return mImplementationMode;
}
/**
* Gets the {@link Preview.SurfaceProvider} to be used with
* {@link Preview#setSurfaceProvider(Executor, Preview.SurfaceProvider)}.
*/
@NonNull
public Preview.SurfaceProvider getPreviewSurfaceProvider() {
return mImplementation.getSurfaceProvider();
}
/**
* Implements this interface to create PreviewView implementation.
*/
interface Implementation {
/**
* Initializes the parent view with sub views.
*
* @param parent the containing parent {@link FrameLayout}.
*/
void init(@NonNull FrameLayout parent);
/**
* Gets the {@link Preview.SurfaceProvider} to be used with {@link Preview}.
*/
@NonNull
Preview.SurfaceProvider getSurfaceProvider();
/**
* Notifies that the display properties have changed.
*
* <p>Implementation might need to adjust transform by latest display properties such as
* display orientation in order to show the preview correctly.
*/
void onDisplayChanged();
}
/**
* The implementation mode of a {@link PreviewView}
*
* <p>Specifies how the Preview surface will be implemented internally: Using a
* {@link android.view.SurfaceView} or a {@link android.view.TextureView} (which is the default)
* </p>
*/
public enum ImplementationMode {
/** Use a {@link android.view.SurfaceView} for the preview */
SURFACE_VIEW(0),
/** Use a {@link android.view.TextureView} for the preview */
TEXTURE_VIEW(1);
private final int mId;
ImplementationMode(final int id) {
mId = id;
}
public int getId() {
return mId;
}
static ImplementationMode fromId(final int id) {
for (final ImplementationMode mode : values()) {
if (mode.mId == id) {
return mode;
}
}
throw new IllegalArgumentException("Unsupported implementation mode " + id);
}
}
/** Options for scaling the preview vis-à-vis its container {@link PreviewView}. */
public enum ScaleType {
/**
* Scale the preview, maintaining the source aspect ratio, so it fills the entire
* {@link PreviewView}, and align it to the top left corner of the view.
* This may cause the preview to be cropped if the camera preview aspect ratio does not
* match that of its container {@link PreviewView}.
*/
FILL_START,
/**
* Scale the preview, maintaining the source aspect ratio, so it fills the entire
* {@link PreviewView}, and center it inside the view.
* This may cause the preview to be cropped if the camera preview aspect ratio does not
* match that of its container {@link PreviewView}.
*/
FILL_CENTER,
/**
* Scale the preview, maintaining the source aspect ratio, so it fills the entire
* {@link PreviewView}, and align it to the bottom right corner of the view.
* This may cause the preview to be cropped if the camera preview aspect ratio does not
* match that of its container {@link PreviewView}.
*/
FILL_END,
/**
* Scale the preview, maintaining the source aspect ratio, so it is entirely contained
* within the {@link PreviewView}, and align it to the top left corner of the view.
* Both dimensions of the preview will be equal or less than the corresponding dimensions
* of its container {@link PreviewView}.
*/
FIT_START,
/**
* Scale the preview, maintaining the source aspect ratio, so it is entirely contained
* within the {@link PreviewView}, and center it inside the view.
* Both dimensions of the preview will be equal or less than the corresponding dimensions
* of its container {@link PreviewView}.
*/
FIT_CENTER,
/**
* Scale the preview, maintaining the source aspect ratio, so it is entirely contained
* within the {@link PreviewView}, and align it to the bottom right corner of the view.
* Both dimensions of the preview will be equal or less than the corresponding dimensions
* of its container {@link PreviewView}.
*/
FIT_END
}
}

View File

@@ -1,162 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.content.Context;
import android.graphics.Point;
import android.util.Pair;
import android.util.Size;
import android.view.Display;
import android.view.View;
import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
final class ScaleTypeTransform {
/**
* Computes the scale by which a view has to scale in x and y in order to fill its parent
* while maintaining the buffer's aspect ratio.
*
* @param container A parent {@link android.view.View} that wraps {@code view}.
* @param view A child {@link android.view.View} of {@code container}.
* @param bufferSize A {@link android.util.Size} whose aspect ratio must be maintained when
* scaling {@code view} inside its parent {@code container}.
* @return The scale by which {@code view} has to scale in x and y in order to fill its
* parent while maintaining {@code bufferSize}'s aspect ratio.
*/
@SuppressWarnings("SuspiciousNameCombination")
static Pair<Float, Float> getFillScaleWithBufferAspectRatio(@NonNull final View container,
@NonNull final View view, @NonNull final Size bufferSize) {
// Scaling only makes sense when none of the dimensions are equal to zero. In the
// opposite case, a default scale of 1 is returned,
if (container.getWidth() == 0 || container.getHeight() == 0 || view.getWidth() == 0
|| view.getHeight() == 0 || bufferSize.getWidth() == 0
|| bufferSize.getHeight() == 0) {
return new Pair<>(1F, 1F);
}
final int viewRotationDegrees = getRotationDegrees(view);
final boolean isNaturalPortrait = isNaturalPortrait(view.getContext(), viewRotationDegrees);
final int bufferWidth;
final int bufferHeight;
if (isNaturalPortrait) {
bufferWidth = bufferSize.getHeight();
bufferHeight = bufferSize.getWidth();
} else {
bufferWidth = bufferSize.getWidth();
bufferHeight = bufferSize.getHeight();
}
// Scale the buffers back to the original output size.
float scaleX = bufferWidth / (float) view.getWidth();
float scaleY = bufferHeight / (float) view.getHeight();
int bufferRotatedWidth;
int bufferRotatedHeight;
if (viewRotationDegrees == 0 || viewRotationDegrees == 180) {
bufferRotatedWidth = bufferWidth;
bufferRotatedHeight = bufferHeight;
} else {
bufferRotatedWidth = bufferHeight;
bufferRotatedHeight = bufferWidth;
}
// Scale the buffer so that it completely fills the container.
final float scale = Math.max(container.getWidth() / (float) bufferRotatedWidth,
container.getHeight() / (float) bufferRotatedHeight);
scaleX *= scale;
scaleY *= scale;
return new Pair<>(scaleX, scaleY);
}
/**
* Computes the top left coordinates for the view to be centered inside its parent.
*
* @param container A parent {@link android.view.View} that wraps {@code view}.
* @param view A child {@link android.view.View} of {@code container}.
* @return A {@link android.graphics.Point} whose coordinates represent the top left of
* {@code view} when centered inside its parent.
*/
static Point getOriginOfCenteredView(@NonNull final View container,
@NonNull final View view) {
final int offsetX = (view.getWidth() - container.getWidth()) / 2;
final int offsetY = (view.getHeight() - container.getHeight()) / 2;
return new Point(-offsetX, -offsetY);
}
/**
* Computes the rotation of a {@link android.view.View} in degrees from its natural
* orientation.
*/
static int getRotationDegrees(@NonNull final View view) {
final WindowManager windowManager = (WindowManager) view.getContext().getSystemService(
Context.WINDOW_SERVICE);
if (windowManager == null) {
return 0;
}
final int rotation = windowManager.getDefaultDisplay().getRotation();
return SurfaceRotation.rotationDegreesFromSurfaceRotation(rotation);
}
/**
* Determines whether the current device is a natural portrait-oriented device
*
* <p>
* Using the current app's window to determine whether the device is a natural
* portrait-oriented device doesn't work in all scenarios, one example of this is multi-window
* mode.
* Taking a natural portrait-oriented device in multi-window mode, rotating it 90 degrees (so
* that it's in landscape), with the app open, and its window's width being smaller than its
* height. Using the app's width and height would determine that the device isn't
* naturally portrait-oriented, where in fact it is, which is why it is important to use the
* size of the device instead.
* </p>
*
* @param context Current context. Can be an {@link android.app.Application} context
* or an {@link android.app.Activity} context.
* @param rotationDegrees The device's rotation in degrees from its natural orientation.
* @return Whether the device is naturally portrait-oriented.
*/
private static boolean isNaturalPortrait(@NonNull final Context context,
final int rotationDegrees) {
final WindowManager windowManager = (WindowManager) context.getSystemService(
Context.WINDOW_SERVICE);
if (windowManager == null) {
return true;
}
final Display display = windowManager.getDefaultDisplay();
final Point deviceSize = new Point();
display.getRealSize(deviceSize);
final int width = deviceSize.x;
final int height = deviceSize.y;
return ((rotationDegrees == 0 || rotationDegrees == 180) && width < height) || (
(rotationDegrees == 90 || rotationDegrees == 270) && width >= height);
}
// Prevent creating an instance
private ScaleTypeTransform() {
}
}

View File

@@ -1,46 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.view.Surface;
final class SurfaceRotation {
/**
     * Gets the rotation in degrees from one of the {@link Surface} rotation constants.
     *
     * <p>Valid values for the rotation constant are {@link Surface#ROTATION_0}, {@link
     * Surface#ROTATION_90}, {@link Surface#ROTATION_180} and {@link Surface#ROTATION_270}.
*/
static int rotationDegreesFromSurfaceRotation(int rotationConstant) {
switch (rotationConstant) {
case Surface.ROTATION_0:
return 0;
case Surface.ROTATION_90:
return 90;
case Surface.ROTATION_180:
return 180;
case Surface.ROTATION_270:
return 270;
default:
throw new UnsupportedOperationException(
"Unsupported surface rotation constant: " + rotationConstant);
}
}
/** Prevents construction */
private SurfaceRotation() {}
}

View File

@@ -1,181 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.util.Size;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.UiThread;
import androidx.camera.core.Preview;
import androidx.camera.core.SurfaceRequest;
import androidx.core.content.ContextCompat;
import org.thoughtcrime.securesms.logging.Log;
/**
* The SurfaceView implementation for {@link PreviewView}.
*/
@RequiresApi(21)
final class SurfaceViewImplementation implements PreviewView.Implementation {
private static final String TAG = "SurfaceViewPreviewView";
// Synthetic Accessor
@SuppressWarnings("WeakerAccess")
TransformableSurfaceView mSurfaceView;
// Synthetic Accessor
@SuppressWarnings("WeakerAccess")
final SurfaceRequestCallback mSurfaceRequestCallback =
new SurfaceRequestCallback();
private Preview.SurfaceProvider mSurfaceProvider =
new Preview.SurfaceProvider() {
@Override
public void onSurfaceRequested(@NonNull SurfaceRequest surfaceRequest) {
mSurfaceView.post(
() -> mSurfaceRequestCallback.setSurfaceRequest(surfaceRequest));
}
};
/**
* {@inheritDoc}
*/
@Override
public void init(@NonNull FrameLayout parent) {
mSurfaceView = new TransformableSurfaceView(parent.getContext());
mSurfaceView.setLayoutParams(
new FrameLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.MATCH_PARENT));
parent.addView(mSurfaceView);
mSurfaceView.getHolder().addCallback(mSurfaceRequestCallback);
}
/**
* {@inheritDoc}
*/
@NonNull
@Override
public Preview.SurfaceProvider getSurfaceProvider() {
return mSurfaceProvider;
}
@Override
public void onDisplayChanged() {
}
/**
* The {@link SurfaceHolder.Callback} on mSurfaceView.
*
     * <p> SurfaceView creates a Surface on its own before we can do anything. This class makes
     * sure only a Surface with the correct size will be returned to Preview.
*/
class SurfaceRequestCallback implements SurfaceHolder.Callback {
// Target Surface size. Only complete the SurfaceRequest when the size of the Surface
// matches this value.
// Guarded by UI thread.
@Nullable
private Size mTargetSize;
// SurfaceRequest to set when the target size is met.
// Guarded by UI thread.
@Nullable
private SurfaceRequest mSurfaceRequest;
// The cached size of the current Surface.
// Guarded by UI thread.
@Nullable
private Size mCurrentSurfaceSize;
/**
         * Stores the surface request and its target size. The request will only be completed if
         * the current size of the Surface matches the target size.
*/
@UiThread
void setSurfaceRequest(@NonNull SurfaceRequest surfaceRequest) {
cancelPreviousRequest();
mSurfaceRequest = surfaceRequest;
Size targetSize = surfaceRequest.getResolution();
mTargetSize = targetSize;
if (!tryToComplete()) {
// The current size is incorrect. Wait for it to change.
Log.d(TAG, "Wait for new Surface creation.");
mSurfaceView.getHolder().setFixedSize(targetSize.getWidth(),
targetSize.getHeight());
}
}
/**
         * Completes the surface request if the current Surface size matches the target size.
         *
         * @return true if the request was completed.
*/
@UiThread
private boolean tryToComplete() {
Surface surface = mSurfaceView.getHolder().getSurface();
if (mSurfaceRequest != null && mTargetSize != null && mTargetSize.equals(
mCurrentSurfaceSize)) {
Log.d(TAG, "Surface set on Preview.");
mSurfaceRequest.provideSurface(surface,
ContextCompat.getMainExecutor(mSurfaceView.getContext()),
(result) -> Log.d(TAG, "Safe to release surface."));
mSurfaceRequest = null;
mTargetSize = null;
return true;
}
return false;
}
@UiThread
private void cancelPreviousRequest() {
if (mSurfaceRequest != null) {
Log.d(TAG, "Request canceled: " + mSurfaceRequest);
mSurfaceRequest.willNotProvideSurface();
mSurfaceRequest = null;
}
mTargetSize = null;
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
Log.d(TAG, "Surface created.");
// No-op. Handling surfaceChanged() is enough because it's always called afterwards.
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
Log.d(TAG, "Surface changed. Size: " + width + "x" + height);
mCurrentSurfaceSize = new Size(width, height);
tryToComplete();
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
Log.d(TAG, "Surface destroyed.");
mCurrentSurfaceSize = null;
cancelPreviousRequest();
}
}
}
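
For context, a minimal sketch of how an implementation like this is attached to a Preview use case; previewContainer and the use-case setup are hypothetical, and only Preview#setSurfaceProvider is assumed from the public CameraX API.

// Hypothetical wiring sketch (caller in the same package, since the class is package-private).
SurfaceViewImplementation implementation = new SurfaceViewImplementation();
implementation.init(previewContainer);                         // previewContainer: a FrameLayout
Preview preview = new Preview.Builder().build();
preview.setSurfaceProvider(implementation.getSurfaceProvider());
// Incoming SurfaceRequests are posted to the SurfaceView and completed once a Surface of the
// requested resolution has been created.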

View File

@@ -1,239 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import static androidx.camera.core.SurfaceRequest.Result;
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.camera.core.Preview;
import androidx.camera.core.SurfaceRequest;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.impl.utils.futures.FutureCallback;
import androidx.camera.core.impl.utils.futures.Futures;
import androidx.concurrent.futures.CallbackToFutureAdapter;
import androidx.core.content.ContextCompat;
import androidx.core.util.Preconditions;
import com.google.common.util.concurrent.ListenableFuture;
import org.thoughtcrime.securesms.logging.Log;
/**
 * The {@link TextureView} implementation for {@link PreviewView}.
*/
// Begin Signal Custom Code Block
@RequiresApi(21)
@SuppressLint("RestrictedApi")
// End Signal Custom Code Block
public class TextureViewImplementation implements PreviewView.Implementation {
private static final String TAG = "TextureViewImpl";
private FrameLayout mParent;
TextureView mTextureView;
SurfaceTexture mSurfaceTexture;
private Size mResolution;
ListenableFuture<Result> mSurfaceReleaseFuture;
SurfaceRequest mSurfaceRequest;
@Override
public void init(@NonNull FrameLayout parent) {
mParent = parent;
}
@NonNull
@Override
public Preview.SurfaceProvider getSurfaceProvider() {
return (surfaceRequest) -> {
mResolution = surfaceRequest.getResolution();
initInternal();
if (mSurfaceRequest != null) {
mSurfaceRequest.willNotProvideSurface();
}
mSurfaceRequest = surfaceRequest;
surfaceRequest.addRequestCancellationListener(
ContextCompat.getMainExecutor(mTextureView.getContext()), () -> {
if (mSurfaceRequest != null && mSurfaceRequest == surfaceRequest) {
mSurfaceRequest = null;
mSurfaceReleaseFuture = null;
}
});
tryToProvidePreviewSurface();
};
}
@Override
public void onDisplayChanged() {
if (mParent == null || mTextureView == null || mResolution == null) {
return;
}
correctPreviewForCenterCrop(mParent, mTextureView, mResolution);
}
private void initInternal() {
mTextureView = new TextureView(mParent.getContext());
mTextureView.setLayoutParams(
new FrameLayout.LayoutParams(mResolution.getWidth(), mResolution.getHeight()));
mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(final SurfaceTexture surfaceTexture,
final int width, final int height) {
mSurfaceTexture = surfaceTexture;
tryToProvidePreviewSurface();
}
@Override
public void onSurfaceTextureSizeChanged(final SurfaceTexture surfaceTexture,
final int width, final int height) {
Log.d(TAG, "onSurfaceTextureSizeChanged(width:" + width + ", height: " + height
+ " )");
}
/**
* If a surface has been provided to the camera (meaning
* {@link TextureViewImplementation#mSurfaceRequest} is null), but the camera
* is still using it (meaning {@link TextureViewImplementation#mSurfaceReleaseFuture} is
* not null), a listener must be added to
* {@link TextureViewImplementation#mSurfaceReleaseFuture} to ensure the surface
* is properly released after the camera is done using it.
*
* @param surfaceTexture The {@link SurfaceTexture} about to be destroyed.
* @return false if the camera is not done with the surface, true otherwise.
*/
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture surfaceTexture) {
mSurfaceTexture = null;
if (mSurfaceRequest == null && mSurfaceReleaseFuture != null) {
Futures.addCallback(mSurfaceReleaseFuture,
new FutureCallback<Result>() {
@Override
public void onSuccess(Result result) {
Preconditions.checkState(result.getResultCode()
!= Result.RESULT_SURFACE_ALREADY_PROVIDED,
"Unexpected result from SurfaceRequest. Surface was "
+ "provided twice.");
surfaceTexture.release();
}
@Override
public void onFailure(Throwable t) {
throw new IllegalStateException("SurfaceReleaseFuture did not "
+ "complete nicely.", t);
}
}, ContextCompat.getMainExecutor(mTextureView.getContext()));
return false;
} else {
return true;
}
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture surfaceTexture) {
}
});
// Even though PreviewView calls `removeAllViews()` before calling init(), it should be
// called again here in case `getPreviewSurfaceProvider()` is called more than once on
// the same TextureViewImplementation instance.
mParent.removeAllViews();
mParent.addView(mTextureView);
}
@SuppressWarnings("WeakerAccess")
void tryToProvidePreviewSurface() {
/*
Should only continue if:
- The preview size has been specified.
- The textureView's surfaceTexture is available (after TextureView
.SurfaceTextureListener#onSurfaceTextureAvailable is invoked)
        - The SurfaceRequest has been received from the SurfaceProvider (after Preview
        .SurfaceProvider#onSurfaceRequested is invoked).
*/
if (mResolution == null || mSurfaceTexture == null || mSurfaceRequest == null) {
return;
}
mSurfaceTexture.setDefaultBufferSize(mResolution.getWidth(), mResolution.getHeight());
final Surface surface = new Surface(mSurfaceTexture);
final ListenableFuture<Result> surfaceReleaseFuture =
CallbackToFutureAdapter.getFuture(completer -> {
mSurfaceRequest.provideSurface(surface,
CameraXExecutors.directExecutor(), completer::set);
return "provideSurface[request=" + mSurfaceRequest + " surface=" + surface
+ "]";
});
mSurfaceReleaseFuture = surfaceReleaseFuture;
mSurfaceReleaseFuture.addListener(() -> {
surface.release();
if (mSurfaceReleaseFuture == surfaceReleaseFuture) {
mSurfaceReleaseFuture = null;
}
}, ContextCompat.getMainExecutor(mTextureView.getContext()));
mSurfaceRequest = null;
correctPreviewForCenterCrop(mParent, mTextureView, mResolution);
}
/**
* Corrects the preview to match the UI orientation and completely fill the PreviewView.
*
* <p>
* The camera produces a preview that depends on its sensor orientation and that has a
* specific resolution. In order to display it correctly, this preview must be rotated to
* match the UI orientation, and must be scaled up/down to fit inside the view that's
* displaying it. This method takes care of doing so while keeping the preview centered.
* </p>
*
* @param container The {@link PreviewView}'s root layout, which wraps the preview.
* @param textureView The {@link android.view.TextureView} that displays the preview, its size
* must match the camera sensor output size.
* @param bufferSize The camera sensor output size.
*/
private void correctPreviewForCenterCrop(@NonNull final View container,
@NonNull final TextureView textureView, @NonNull final Size bufferSize) {
// Scale TextureView to fill PreviewView while respecting sensor output size aspect ratio
final Pair<Float, Float> scale = ScaleTypeTransform.getFillScaleWithBufferAspectRatio(container, textureView,
bufferSize);
textureView.setScaleX(scale.first);
textureView.setScaleY(scale.second);
// Center TextureView inside PreviewView
final Point newOrigin = ScaleTypeTransform.getOriginOfCenteredView(container, textureView);
textureView.setX(newOrigin.x);
textureView.setY(newOrigin.y);
// Rotate TextureView to correct preview orientation
final int rotation = ScaleTypeTransform.getRotationDegrees(textureView);
textureView.setRotation(-rotation);
}
}

View File

@@ -1,97 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.SurfaceTexture;
import android.view.TextureView;
import androidx.annotation.NonNull;
import androidx.annotation.RestrictTo;
import androidx.camera.core.MeteringPoint;
import androidx.camera.core.MeteringPointFactory;
/**
* A {@link MeteringPointFactory} for creating a {@link MeteringPoint} by {@link TextureView} and
* (x,y).
*
* <p>SurfaceTexture in TextureView could be cropped, scaled or rotated by
* {@link TextureView#getTransform(Matrix)}. This factory translates the (x, y) into the sensor
* crop region normalized (x,y) by this transform. {@link SurfaceTexture#getTransformMatrix} is
* also used during the translation. No lens facing information is required because
* {@link SurfaceTexture#getTransformMatrix} contains the necessary transformation corresponding
 * to the lens facing of the current camera output.
*/
public class TextureViewMeteringPointFactory extends MeteringPointFactory {
private final TextureView mTextureView;
public TextureViewMeteringPointFactory(@NonNull TextureView textureView) {
mTextureView = textureView;
}
/**
 * Translates an (x,y) coordinate from the TextureView.
*
* @hide
*/
@NonNull
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@Override
protected PointF convertPoint(float x, float y) {
Matrix transform = new Matrix();
mTextureView.getTransform(transform);
// applying reverse of TextureView#getTransform
Matrix inverse = new Matrix();
transform.invert(inverse);
float[] pt = new float[]{x, y};
inverse.mapPoints(pt);
// get SurfaceTexture#getTransformMatrix
float[] surfaceTextureMat = new float[16];
mTextureView.getSurfaceTexture().getTransformMatrix(surfaceTextureMat);
// convert SurfaceTexture#getTransformMatrix(4x4 column major 3D matrix) to
// android.graphics.Matrix(3x3 row major 2D matrix)
Matrix surfaceTextureTransform = glMatrixToGraphicsMatrix(surfaceTextureMat);
float[] pt2 = new float[2];
// convert to texture coordinates first.
pt2[0] = pt[0] / mTextureView.getWidth();
pt2[1] = (mTextureView.getHeight() - pt[1]) / mTextureView.getHeight();
surfaceTextureTransform.mapPoints(pt2);
return new PointF(pt2[0], pt2[1]);
}
private Matrix glMatrixToGraphicsMatrix(float[] glMatrix) {
float[] convert = new float[9];
convert[0] = glMatrix[0];
convert[1] = glMatrix[4];
convert[2] = glMatrix[12];
convert[3] = glMatrix[1];
convert[4] = glMatrix[5];
convert[5] = glMatrix[13];
convert[6] = glMatrix[3];
convert[7] = glMatrix[7];
convert[8] = glMatrix[15];
Matrix graphicsMatrix = new Matrix();
graphicsMatrix.setValues(convert);
return graphicsMatrix;
}
}
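
For context, a minimal tap-to-focus sketch using a factory like this one; camera, textureView and event are hypothetical references, and the calls use only the MeteringPointFactory, FocusMeteringAction and CameraControl APIs available in this CameraX version.

// Hypothetical tap-to-focus sketch.
MeteringPointFactory factory = new TextureViewMeteringPointFactory(textureView);
MeteringPoint point = factory.createPoint(event.getX(), event.getY());  // event: a MotionEvent
FocusMeteringAction action = new FocusMeteringAction.Builder(point).build();
camera.getCameraControl().startFocusAndMetering(action);                // camera: a bound Camera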

View File

@@ -1,130 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.view.SurfaceView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
/**
* A subclass of {@link SurfaceView} that supports translation and scaling transformations.
*/
// Begin Signal Custom Code Block
@RequiresApi(21)
// End Signal Custom Code Block
final class TransformableSurfaceView extends SurfaceView {
private RectF mOverriddenLayoutRect;
TransformableSurfaceView(@NonNull Context context) {
super(context);
}
TransformableSurfaceView(@NonNull Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
}
TransformableSurfaceView(@NonNull Context context, @Nullable AttributeSet attrs,
int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
TransformableSurfaceView(@NonNull Context context, @Nullable AttributeSet attrs,
int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (mOverriddenLayoutRect == null) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
} else {
setMeasuredDimension((int) mOverriddenLayoutRect.width(),
(int) mOverriddenLayoutRect.height());
}
}
/**
* Sets the transform to associate with this surface view. Only translation and scaling are
* supported. If a rotated transformation is passed in, an exception is thrown.
*
* @param transform The transform to apply to the content of this view.
*/
void setTransform(final Matrix transform) {
if (hasRotation(transform)) {
throw new IllegalArgumentException("TransformableSurfaceView does not support "
+ "rotation transformations.");
}
final RectF rect = new RectF(getLeft(), getTop(), getRight(), getBottom());
transform.mapRect(rect);
overrideLayout(rect);
}
private boolean hasRotation(final Matrix matrix) {
final float[] values = new float[9];
matrix.getValues(values);
/*
A translation matrix can be represented as:
(1 0 transX)
        (0 1 transY)
(0 0 1)
A rotation Matrix of ψ degrees can be represented as:
(cosψ -sinψ 0)
(sinψ cosψ 0)
(0 0 1)
A scale matrix can be represented as:
(scaleX 0 0)
(0 scaleY 0)
        (0 0 1)
Meaning a transformed matrix can be represented as:
(scaleX * cosψ -scaleX * sinψ transX)
(scaleY * sinψ scaleY * cosψ transY)
(0 0 1)
Using the following 2 equalities:
scaleX * cosψ = matrix[0][0]
-scaleX * sinψ = matrix[0][1]
The following is deduced:
-tanψ = matrix[0][1] / matrix[0][0]
Or:
ψ = -arctan(matrix[0][1] / matrix[0][0])
*/
final double angle = -Math.atan2(values[Matrix.MSKEW_X], values[Matrix.MSCALE_X]);
return Math.round(angle * (180 / Math.PI)) != 0;
}
private void overrideLayout(final RectF overriddenLayoutRect) {
mOverriddenLayoutRect = overriddenLayoutRect;
setX(overriddenLayoutRect.left);
setY(overriddenLayoutRect.top);
requestLayout();
}
}
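
For illustration, a minimal sketch of the only kind of transform this view accepts (translation and scale, no rotation); the values are hypothetical and the caller is assumed to be in the same package, since setTransform is package-private.

// Hypothetical usage sketch: scale the surface view 1.5x and shift it to re-center it.
Matrix transform = new Matrix();
transform.setScale(1.5f, 1.5f);
transform.postTranslate(-270f, -480f);
surfaceView.setTransform(transform);      // surfaceView: a TransformableSurfaceView
// A matrix with any rotation component would make setTransform throw IllegalArgumentException.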

View File

@@ -4,7 +4,7 @@
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.thoughtcrime.securesms.mediasend.camerax.CameraXView
<androidx.camera.view.SignalCameraView
android:id="@+id/camerax_camera"
android:layout_gravity="center"
android:layout_width="match_parent"

View File

@@ -403,30 +403,6 @@
<attr name="recordSize" format="dimension" />
</declare-styleable>
<declare-styleable name="CameraXView">
<attr format="enum" name="scaleType">
<enum name="centerCrop" value="0"/>
<enum name="centerInside" value="1"/>
</attr>
<attr format="enum" name="lensFacing">
<enum name="none" value="0"/>
<enum name="front" value="1"/>
<enum name="back" value="2"/>
</attr>
<attr format="enum" name="captureMode">
<enum name="image" value="0"/>
<enum name="video" value="1"/>
<enum name="mixed" value="2"/>
</attr>
<attr format="enum" name="flash">
<enum name="auto" value="1"/>
<enum name="on" value="2"/>
<enum name="off" value="4"/>
</attr>
<attr format="boolean" name="pinchToZoomEnabled"/>
</declare-styleable>
<declare-styleable name="ArcProgressBar">
<attr name="arcWidth" format="dimension" />
<attr name="arcBackgroundColor" format="color" />

View File

@@ -30,14 +30,17 @@ dependencyVerification {
['androidx.autofill:autofill:1.0.0',
'c9468f56e05006ea151a426c54957cd0799b8b83a579d2846dd22061f33e5ecd'],
['androidx.camera:camera-camera2:1.0.0-beta01',
'02e15ad76153d09adcd6631627960707a8786333a8276d05dcbefc2bfe4ef5a1'],
['androidx.camera:camera-camera2:1.0.0-beta11',
'54d7c975ea7387f0d7c65faf531005fc543fe8e8d826eb696bb2c7f950041a9c'],
['androidx.camera:camera-core:1.0.0-beta01',
'acba5f196b3bd3ecf756067c9f4fae4f15b81228c2bd6b52212d9454e27599f0'],
['androidx.camera:camera-core:1.0.0-beta11',
'58ed18e385d3eea3e17568b5904629f5111de8af87035942f908d49815331645'],
['androidx.camera:camera-lifecycle:1.0.0-beta01',
'25c761a863555d5e008e428cf271caca774c867bf269495dc32a0fdc65770fd5'],
['androidx.camera:camera-lifecycle:1.0.0-beta11',
'f8617971cbfb2d249eea3558756bc466cb35a946a9269b966b24b6787f449631'],
['androidx.camera:camera-view:1.0.0-alpha18',
'029652f71430e4acc97c41d40e509bf6774a86dd35a19cea5c856f0b96a6c70c'],
['androidx.cardview:cardview:1.0.0',
'1193c04c22a3d6b5946dae9f4e8c59d6adde6a71b6bd5d87fb99d82dda1afec7'],