Commit 87b3a36a authored by Christian Fremerey, committed by Commit Bot

[Image Capture, Android] Fix various threading issues

This CL simplifies the threading of class VideoCaptureCamera2.java
and by doing so resolves potential issues caused by concurrent
access to member variables and Android video API calls.

See https://bugs.chromium.org/p/chromium/issues/detail?id=857530 for
details on these issues.

The simplified model is to (still) have the constructor and public API
calls happen on a native thread, and to use a single dedicated thread
owned by the class instance to post to, do work on, and call back into
the native code.
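
To illustrate (a minimal sketch only, not code from this CL; the class and
method names below are hypothetical), the model boils down to this pattern:

    import android.os.Handler;
    import android.os.HandlerThread;

    class CameraLikeDevice {
        private final Handler mCameraThreadHandler;

        // The constructor runs on the native (Looper-less) thread.
        CameraLikeDevice() {
            HandlerThread thread = new HandlerThread("CameraThread");
            thread.start();
            mCameraThreadHandler = new Handler(thread.getLooper());
        }

        // Public API entry point, also called on the native thread. All real
        // work, and all callbacks into native code, happen on the dedicated
        // thread owned by this instance.
        public void doWorkAsync(final long callbackId) {
            mCameraThreadHandler.post(new Runnable() {
                @Override
                public void run() {
                    nativeOnWorkDone(callbackId); // stand-in for a JNI callback
                }
            });
        }

        private void nativeOnWorkDone(long callbackId) {} // hypothetical
    }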

This CL is part of a series, see Design Doc at
https://docs.google.com/document/d/1h1kva4VR1gaV3HVXaSYZFY41icfaB58j-WnHmJdyqc8/edit?usp=sharing

Bug: 857530
Change-Id: I75ffcc4a14f2395d833d80f300acef7b456676e8
Reviewed-on: https://chromium-review.googlesource.com/1117857
Commit-Queue: Christian Fremerey <chfremer@chromium.org>
Reviewed-by: Emircan Uysaler <emircan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#576155}
parent 00d64e2e
@@ -62,14 +62,22 @@ public abstract class VideoCapture {
    @CalledByNative
    public abstract boolean allocate(int width, int height, int frameRate);

    // Success is indicated by returning true and a callback to
    // nativeOnStarted(), which may occur synchronously or asynchronously.
    // Failure can be indicated by one of the following:
    // * Returning false. In this case no callback to nativeOnStarted() is made.
    // * Returning true, and asynchronously invoking nativeOnError. In this case
    //   also no callback to nativeOnStarted() is made.
    @CalledByNative
-   public abstract boolean startCapture();
+   public abstract boolean startCaptureMaybeAsync();

    // Blocks until it is guaranteed that no more frames are sent.
    @CalledByNative
-   public abstract boolean stopCapture();
+   public abstract boolean stopCaptureAndBlockUntilStopped();

    // Replies by calling nativeOnGetPhotoCapabilitiesReply().
    @CalledByNative
-   public abstract PhotoCapabilities getPhotoCapabilities();
+   public abstract void getPhotoCapabilitiesAsync(long callbackId);

    /**
     * @param zoom Zoom level, should be ignored if 0.
@@ -95,8 +103,9 @@ public abstract class VideoCapture {
            boolean hasRedEyeReduction, boolean redEyeReduction, int fillLightMode,
            boolean hasTorch, boolean torch, double colorTemperature);

    // Replies by calling nativeOnPhotoTaken().
    @CalledByNative
-   public abstract boolean takePhoto(final long callbackId);
+   public abstract void takePhotoAsync(long callbackId);

    @CalledByNative
    public abstract void deallocate();
@@ -164,6 +173,12 @@ public abstract class VideoCapture {
        return orientation;
    }

    // {@link nativeOnPhotoTaken()} needs to be called back if there's any
    // problem after {@link takePhotoAsync()} has returned.
    protected void notifyTakePhotoError(long callbackId) {
        nativeOnPhotoTaken(mNativeVideoCaptureDeviceAndroid, callbackId, null);
    }

    /**
     * Finds the framerate range matching |targetFramerate|. Tries to find a range with as low of a
     * minimum value as possible to allow the camera adjust based on the lighting conditions.
@@ -230,10 +245,17 @@ public abstract class VideoCapture {
    // Method for VideoCapture implementations to signal an asynchronous error.
    public native void nativeOnError(long nativeVideoCaptureDeviceAndroid, String message);

-   // Method for VideoCapture implementations to send Photos back to.
    public native void nativeOnGetPhotoCapabilitiesReply(
            long nativeVideoCaptureDeviceAndroid, long callbackId, PhotoCapabilities result);

    // Callback for calls to takePhoto(). This can indicate both success and
    // failure. Failure is indicated by |data| being null.
    public native void nativeOnPhotoTaken(
            long nativeVideoCaptureDeviceAndroid, long callbackId, byte[] data);

    // Method for VideoCapture implementations to report device started event.
    public native void nativeOnStarted(long nativeVideoCaptureDeviceAndroid);

    public native void nativeDCheckCurrentlyOnIncomingTaskRunner(
            long nativeVideoCaptureDeviceAndroid);
}
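
The renames above encode the calling contract directly in the method names. For
startCaptureMaybeAsync() there are exactly three outcomes; a hypothetical
implementation honoring them (a sketch, not code from this CL; preconditionsHold()
and openDeviceOnCameraThread() are invented helpers) could look like:

    @Override
    public boolean startCaptureMaybeAsync() {
        // Outcome 1: synchronous failure; no nativeOnStarted() will follow.
        if (!preconditionsHold()) return false;

        mCameraThreadHandler.post(new Runnable() {
            @Override
            public void run() {
                if (openDeviceOnCameraThread()) {
                    // Outcome 2: asynchronous success.
                    nativeOnStarted(mNativeVideoCaptureDeviceAndroid);
                } else {
                    // Outcome 3: asynchronous failure; nativeOnStarted() is
                    // never called.
                    nativeOnError(mNativeVideoCaptureDeviceAndroid, "open failed");
                }
            }
        });
        return true;
    }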
@@ -146,8 +146,7 @@ public class VideoCaptureCamera
        synchronized (mPhotoTakenCallbackLock) {
            if (mPhotoTakenCallbackId == 0) return;
-           nativeOnPhotoTaken(
-                   mNativeVideoCaptureDeviceAndroid, mPhotoTakenCallbackId, new byte[0]);
+           notifyTakePhotoError(mPhotoTakenCallbackId);
            mPhotoTakenCallbackId = 0;
        }
    }
@@ -425,9 +424,9 @@ public class VideoCaptureCamera
    }

    @Override
-   public boolean startCapture() {
+   public boolean startCaptureMaybeAsync() {
        if (mCamera == null) {
-           Log.e(TAG, "startCapture: mCamera is null");
+           Log.e(TAG, "startCaptureAsync: mCamera is null");
            return false;
        }
@@ -444,7 +443,7 @@ public class VideoCaptureCamera
        try {
            mCamera.startPreview();
        } catch (RuntimeException ex) {
-           Log.e(TAG, "startCapture: Camera.startPreview: " + ex);
+           Log.e(TAG, "startCaptureAsync: Camera.startPreview: " + ex);
            return false;
        }
@@ -459,9 +458,9 @@ public class VideoCaptureCamera
    }

    @Override
-   public boolean stopCapture() {
+   public boolean stopCaptureAndBlockUntilStopped() {
        if (mCamera == null) {
-           Log.e(TAG, "stopCapture: mCamera is null");
+           Log.e(TAG, "stopCaptureAndBlockUntilStopped: mCamera is null");
            return true;
        }
@@ -481,7 +480,7 @@ public class VideoCaptureCamera
    }

    @Override
-   public PhotoCapabilities getPhotoCapabilities() {
+   public void getPhotoCapabilitiesAsync(long callbackId) {
        final android.hardware.Camera.Parameters parameters = getCameraParameters(mCamera);
        PhotoCapabilities.Builder builder = new PhotoCapabilities.Builder();
        Log.i(TAG, " CAM params: %s", parameters.flatten());
@@ -634,7 +633,8 @@ public class VideoCaptureCamera
            builder.setFillLightModes(integerArrayListToArray(modes));
        }

-       return builder.build();
+       nativeOnGetPhotoCapabilitiesReply(
+               mNativeVideoCaptureDeviceAndroid, callbackId, builder.build());
    }

    @Override
@@ -776,15 +776,19 @@ public class VideoCaptureCamera
    }

    @Override
-   public boolean takePhoto(final long callbackId) {
+   public void takePhotoAsync(final long callbackId) {
        if (mCamera == null || !mIsRunning) {
-           Log.e(TAG, "takePhoto: mCamera is null or is not running");
-           return false;
+           Log.e(TAG, "takePhotoAsync: mCamera is null or is not running");
+           notifyTakePhotoError(callbackId);
+           return;
        }

        // Only one picture can be taken at once.
        synchronized (mPhotoTakenCallbackLock) {
-           if (mPhotoTakenCallbackId != 0) return false;
+           if (mPhotoTakenCallbackId != 0) {
+               notifyTakePhotoError(callbackId);
+               return;
+           }
            mPhotoTakenCallbackId = callbackId;
        }
        mPreviewParameters = getCameraParameters(mCamera);
@@ -817,18 +821,18 @@ public class VideoCaptureCamera
            mCamera.setParameters(photoParameters);
        } catch (RuntimeException ex) {
            Log.e(TAG, "setParameters " + ex);
-           return false;
+           notifyTakePhotoError(callbackId);
+           return;
        }

        mCamera.takePicture(null, null, null, new CrPictureCallback());
-       return true;
    }

    @Override
    public void deallocate() {
        if (mCamera == null) return;

-       stopCapture();
+       stopCaptureAndBlockUntilStopped();
        try {
            mCamera.setPreviewTexture(null);
            if (mGlTextures != null) GLES20.glDeleteTextures(1, mGlTextures, 0);
...
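
Note the invariant the rewritten takePhotoAsync() above maintains: since the
method no longer returns a status, every exit path must complete the request via
a callback, with notifyTakePhotoError() covering each early return. A condensed
sketch of that shape (illustrative only; canTakePhoto() and startPhotoCapture()
are invented placeholders):

    @Override
    public void takePhotoAsync(final long callbackId) {
        if (!canTakePhoto()) {
            // Failure still completes the request: exactly one
            // nativeOnPhotoTaken() per callbackId, here with null data.
            notifyTakePhotoError(callbackId);
            return;
        }
        // The success path completes later via nativeOnPhotoTaken() with
        // JPEG data.
        startPhotoCapture(callbackId);
    }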
@@ -22,6 +22,7 @@ import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
@@ -32,7 +33,6 @@ import android.view.Surface;
import org.chromium.base.ContextUtils;
import org.chromium.base.Log;
-import org.chromium.base.ThreadUtils;
import org.chromium.base.annotations.JNINamespace;

import java.nio.ByteBuffer;
@@ -54,17 +54,19 @@ public class VideoCaptureCamera2 extends VideoCapture {
    private class CrStateListener extends CameraDevice.StateCallback {
        @Override
        public void onOpened(CameraDevice cameraDevice) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            Log.e(TAG, "CameraDevice.StateCallback onOpened");
            mCameraDevice = cameraDevice;
            changeCameraStateAndNotify(CameraState.CONFIGURING);
-           if (createPreviewObjectsAndStartPreview()) return;
-           changeCameraStateAndNotify(CameraState.STOPPED);
-           nativeOnError(mNativeVideoCaptureDeviceAndroid, "Error configuring camera");
+           createPreviewObjectsAndStartPreviewOrFail();
        }
        @Override
        public void onDisconnected(CameraDevice cameraDevice) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            Log.e(TAG, "cameraDevice was closed unexpectedly");
            cameraDevice.close();
            mCameraDevice = null;
            changeCameraStateAndNotify(CameraState.STOPPED);
@@ -72,7 +74,9 @@ public class VideoCaptureCamera2 extends VideoCapture {
        @Override
        public void onError(CameraDevice cameraDevice, int error) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            Log.e(TAG, "cameraDevice encountered an error");
            cameraDevice.close();
            mCameraDevice = null;
            changeCameraStateAndNotify(CameraState.STOPPED);
@@ -90,6 +94,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
        @Override
        public void onConfigured(CameraCaptureSession cameraCaptureSession) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            Log.d(TAG, "CrPreviewSessionListener.onConfigured");
            mPreviewSession = cameraCaptureSession;
            try {
@@ -116,18 +122,35 @@ public class VideoCaptureCamera2 extends VideoCapture {
                Log.e(TAG, "setRepeatingRequest: ", ex);
                return;
            }

-           // Now wait for trigger on CrPreviewReaderListener.onImageAvailable();
-           nativeOnStarted(mNativeVideoCaptureDeviceAndroid);
            changeCameraStateAndNotify(CameraState.STARTED);
+           nativeOnStarted(mNativeVideoCaptureDeviceAndroid);
+           // Frames will be arriving at CrPreviewReaderListener.onImageAvailable();
        }

        @Override
        public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            Log.d(TAG, "CrPreviewSessionListener.onConfigureFailed");
            // TODO(mcasas): When signalling error, C++ will tear us down. Is there need for
            // cleanup?
            changeCameraStateAndNotify(CameraState.STOPPED);
            mPreviewSession = null;
            nativeOnError(mNativeVideoCaptureDeviceAndroid, "Camera session configuration error");
        }
@Override
public void onClosed(CameraCaptureSession cameraCaptureSession) {
assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";
Log.d(TAG, "CrPreviewSessionListener.onClosed");
// The preview session gets closed temporarily when a takePhoto
// request is being processed. A new preview session will be
// started after that.
mPreviewSession = null;
}
    };

    // Internal class implementing an ImageReader listener for Preview frames. Gets pinged when a
@@ -135,6 +158,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
    private class CrPreviewReaderListener implements ImageReader.OnImageAvailableListener {
        @Override
        public void onImageAvailable(ImageReader reader) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            try (Image image = reader.acquireLatestImage()) {
                if (image == null) return;
@@ -180,6 +205,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
        @Override
        public void onConfigured(CameraCaptureSession session) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            Log.d(TAG, "CrPhotoSessionListener.onConfigured");
            try {
                // This line triggers a single photo capture. No |listener| is registered, so we
@@ -199,6 +226,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
        @Override
        public void onConfigureFailed(CameraCaptureSession session) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            Log.e(TAG, "failed configuring capture session");
            notifyTakePhotoError(mCallbackId);
            return;
@@ -234,6 +263,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
        @Override
        public void onImageAvailable(ImageReader reader) {
            assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            try (Image image = reader.acquireLatestImage()) {
                if (image == null) {
                    throw new IllegalStateException();
@@ -252,32 +283,470 @@ public class VideoCaptureCamera2 extends VideoCapture {
                return;
            }

-           if (createPreviewObjectsAndStartPreview()) return;
-           nativeOnError(mNativeVideoCaptureDeviceAndroid, "Error restarting preview");
+           createPreviewObjectsAndStartPreviewOrFail();
        }
    };

-   // Inner Runnable to reconfigure the preview session, must be run on application context looper.
-   private final Runnable mReconfigureCaptureTask = new Runnable() {
    private class StopCaptureTask implements Runnable {
        private final ConditionVariable mDoneConditionVariable;
public StopCaptureTask(ConditionVariable doneConditionVariable) {
mDoneConditionVariable = doneConditionVariable;
}
@Override
public void run() {
assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";
if (mPreviewSession != null) {
try {
mPreviewSession.abortCaptures();
} catch (CameraAccessException | IllegalStateException ex) {
// Stopping a device whose CameraCaptureSession is closed is not an error:
// ignore this.
Log.w(TAG, "abortCaptures: ", ex);
}
}
            if (mCameraDevice == null) {
                // Still unblock the waiting caller.
                mDoneConditionVariable.open();
                return;
            }
            mCameraDevice.close();
            changeCameraStateAndNotify(CameraState.STOPPED);
            mCropRect = new Rect();

            mDoneConditionVariable.open();
}
private class GetPhotoCapabilitiesTask implements Runnable {
private final long mCallbackId;
public GetPhotoCapabilitiesTask(long callbackId) {
mCallbackId = callbackId;
}
        @Override
        public void run() {
-           ThreadUtils.assertOnUiThread();
-           assert mPreviewRequestBuilder != null : "preview request builder";
-           assert mPreviewSession != null : "preview session";
+           assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

            final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
PhotoCapabilities.Builder builder = new PhotoCapabilities.Builder();
int minIso = 0;
int maxIso = 0;
final Range<Integer> iso_range =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
if (iso_range != null) {
minIso = iso_range.getLower();
maxIso = iso_range.getUpper();
}
builder.setMinIso(minIso).setMaxIso(maxIso).setStepIso(1);
if (mPreviewRequestBuilder.get(CaptureRequest.SENSOR_SENSITIVITY) != null) {
builder.setCurrentIso(mPreviewRequest.get(CaptureRequest.SENSOR_SENSITIVITY));
}
-           // Reuse most of |mPreviewRequestBuilder| since it has expensive items inside that have
-           // to do with preview, e.g. the ImageReader and its associated Surface.
-           configureCommonCaptureSettings(mPreviewRequestBuilder);
            final StreamConfigurationMap streamMap = cameraCharacteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            final Size[] supportedSizes = streamMap.getOutputSizes(ImageFormat.JPEG);
int minWidth = Integer.MAX_VALUE;
int minHeight = Integer.MAX_VALUE;
int maxWidth = 0;
int maxHeight = 0;
for (Size size : supportedSizes) {
if (size.getWidth() < minWidth) minWidth = size.getWidth();
if (size.getHeight() < minHeight) minHeight = size.getHeight();
if (size.getWidth() > maxWidth) maxWidth = size.getWidth();
if (size.getHeight() > maxHeight) maxHeight = size.getHeight();
}
builder.setMinHeight(minHeight).setMaxHeight(maxHeight).setStepHeight(1);
builder.setMinWidth(minWidth).setMaxWidth(maxWidth).setStepWidth(1);
builder.setCurrentHeight(
(mPhotoHeight > 0) ? mPhotoHeight : mCaptureFormat.getHeight());
builder.setCurrentWidth((mPhotoWidth > 0) ? mPhotoWidth : mCaptureFormat.getWidth());
final float currentZoom =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)
.width()
/ (float) mPreviewRequest.get(CaptureRequest.SCALER_CROP_REGION).width();
// There is no min-zoom per se, so clamp it to always 1.
builder.setMinZoom(1.0).setMaxZoom(mMaxZoom);
builder.setCurrentZoom(currentZoom).setStepZoom(0.1);
// Classify the Focus capabilities. In CONTINUOUS and SINGLE_SHOT, we can call
// autoFocus(AutoFocusCallback) to configure region(s) to focus onto.
final int[] jniFocusModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
ArrayList<Integer> focusModes = new ArrayList<Integer>(3);
for (int mode : jniFocusModes) {
if (mode == CameraMetadata.CONTROL_AF_MODE_OFF) {
focusModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
} else if (mode == CameraMetadata.CONTROL_AF_MODE_AUTO
|| mode == CameraMetadata.CONTROL_AF_MODE_MACRO) {
// CONTROL_AF_MODE_{AUTO,MACRO} do not imply continuously focusing.
if (!focusModes.contains(Integer.valueOf(AndroidMeteringMode.SINGLE_SHOT))) {
focusModes.add(Integer.valueOf(AndroidMeteringMode.SINGLE_SHOT));
}
} else if (mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_VIDEO
|| mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE
|| mode == CameraMetadata.CONTROL_AF_MODE_EDOF) {
if (!focusModes.contains(Integer.valueOf(AndroidMeteringMode.CONTINUOUS))) {
focusModes.add(Integer.valueOf(AndroidMeteringMode.CONTINUOUS));
}
}
}
builder.setFocusModes(integerArrayListToArray(focusModes));
final int focusMode = mPreviewRequest.get(CaptureRequest.CONTROL_AF_MODE);
int jniFocusMode = AndroidMeteringMode.NONE;
if (focusMode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_VIDEO
|| focusMode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
jniFocusMode = AndroidMeteringMode.CONTINUOUS;
} else if (focusMode == CameraMetadata.CONTROL_AF_MODE_AUTO
|| focusMode == CameraMetadata.CONTROL_AF_MODE_MACRO) {
jniFocusMode = AndroidMeteringMode.SINGLE_SHOT;
} else if (focusMode == CameraMetadata.CONTROL_AF_MODE_OFF) {
jniFocusMode = AndroidMeteringMode.FIXED;
} else {
assert jniFocusMode == CameraMetadata.CONTROL_AF_MODE_EDOF;
}
builder.setFocusMode(jniFocusMode);
// Auto Exposure is the usual capability and state, unless AE is not available at all,
// which is signalled by an empty CONTROL_AE_AVAILABLE_MODES list. Exposure Compensation
// can also support or be locked, this is equivalent to AndroidMeteringMode.FIXED.
final int[] jniExposureModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
ArrayList<Integer> exposureModes = new ArrayList<Integer>(1);
for (int mode : jniExposureModes) {
if (mode == CameraMetadata.CONTROL_AE_MODE_ON
|| mode == CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH
|| mode == CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| mode == CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
exposureModes.add(Integer.valueOf(AndroidMeteringMode.CONTINUOUS));
break;
}
}
-           try {
-               mPreviewSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
-           } catch (CameraAccessException | SecurityException | IllegalStateException
-                   | IllegalArgumentException ex) {
-               Log.e(TAG, "setRepeatingRequest: ", ex);
-           }
            try {
                if (cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_LOCK_AVAILABLE)) {
                    exposureModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
                }
            } catch (NoSuchFieldError e) {
                // Ignore this exception, it means CONTROL_AE_LOCK_AVAILABLE is not known.
            }
builder.setExposureModes(integerArrayListToArray(exposureModes));
int jniExposureMode = AndroidMeteringMode.CONTINUOUS;
if (mPreviewRequest.get(CaptureRequest.CONTROL_AE_MODE)
== CameraMetadata.CONTROL_AE_MODE_OFF) {
jniExposureMode = AndroidMeteringMode.NONE;
}
if (mPreviewRequest.get(CaptureRequest.CONTROL_AE_LOCK)) {
jniExposureMode = AndroidMeteringMode.FIXED;
}
builder.setExposureMode(jniExposureMode);
final float step =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP)
.floatValue();
builder.setStepExposureCompensation(step);
final Range<Integer> exposureCompensationRange =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
builder.setMinExposureCompensation(exposureCompensationRange.getLower() * step);
builder.setMaxExposureCompensation(exposureCompensationRange.getUpper() * step);
builder.setCurrentExposureCompensation(
mPreviewRequest.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION) * step);
final int[] jniWhiteBalanceMode =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
ArrayList<Integer> whiteBalanceModes = new ArrayList<Integer>(1);
for (int mode : jniWhiteBalanceMode) {
if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO) {
whiteBalanceModes.add(Integer.valueOf(AndroidMeteringMode.CONTINUOUS));
break;
}
}
try {
if (cameraCharacteristics.get(CameraCharacteristics.CONTROL_AWB_LOCK_AVAILABLE)) {
whiteBalanceModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
}
} catch (NoSuchFieldError e) {
// Ignore this exception, it means CONTROL_AWB_LOCK_AVAILABLE is not known.
}
builder.setWhiteBalanceModes(integerArrayListToArray(whiteBalanceModes));
final int whiteBalanceMode = mPreviewRequest.get(CaptureRequest.CONTROL_AWB_MODE);
if (whiteBalanceMode == CameraMetadata.CONTROL_AWB_MODE_OFF) {
builder.setWhiteBalanceMode(AndroidMeteringMode.NONE);
} else if (whiteBalanceMode == CameraMetadata.CONTROL_AWB_MODE_AUTO) {
builder.setWhiteBalanceMode(AndroidMeteringMode.CONTINUOUS);
} else {
builder.setWhiteBalanceMode(AndroidMeteringMode.FIXED);
}
builder.setMinColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(0));
builder.setMaxColorTemperature(
COLOR_TEMPERATURES_MAP.keyAt(COLOR_TEMPERATURES_MAP.size() - 1));
final int index = COLOR_TEMPERATURES_MAP.indexOfValue(whiteBalanceMode);
if (index >= 0) {
builder.setCurrentColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(index));
}
builder.setStepColorTemperature(50);
if (!cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) {
builder.setSupportsTorch(false);
builder.setRedEyeReduction(false);
} else {
// There's no way to query if torch and/or red eye reduction modes are available
// using Camera2 API but since there's a Flash unit, we assume so.
builder.setSupportsTorch(true);
builder.setTorch(mPreviewRequest.get(CaptureRequest.FLASH_MODE)
== CameraMetadata.FLASH_MODE_TORCH);
builder.setRedEyeReduction(true);
final int[] flashModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
ArrayList<Integer> modes = new ArrayList<Integer>(0);
for (int flashMode : flashModes) {
if (flashMode == CameraMetadata.FLASH_MODE_OFF) {
modes.add(Integer.valueOf(AndroidFillLightMode.OFF));
} else if (flashMode == CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH) {
modes.add(Integer.valueOf(AndroidFillLightMode.AUTO));
} else if (flashMode == CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH) {
modes.add(Integer.valueOf(AndroidFillLightMode.FLASH));
}
}
builder.setFillLightModes(integerArrayListToArray(modes));
}
nativeOnGetPhotoCapabilitiesReply(
mNativeVideoCaptureDeviceAndroid, mCallbackId, builder.build());
-       }
-   };
            }
        }
private class PhotoOptions {
public final double zoom;
public final int focusMode;
public final int exposureMode;
public final double width;
public final double height;
public final float[] pointsOfInterest2D;
public final boolean hasExposureCompensation;
public final double exposureCompensation;
public final int whiteBalanceMode;
public final double iso;
public final boolean hasRedEyeReduction;
public final boolean redEyeReduction;
public final int fillLightMode;
public final boolean hasTorch;
public final boolean torch;
public final double colorTemperature;
public PhotoOptions(double zoom, int focusMode, int exposureMode, double width,
double height, float[] pointsOfInterest2D, boolean hasExposureCompensation,
double exposureCompensation, int whiteBalanceMode, double iso,
boolean hasRedEyeReduction, boolean redEyeReduction, int fillLightMode,
boolean hasTorch, boolean torch, double colorTemperature) {
this.zoom = zoom;
this.focusMode = focusMode;
this.exposureMode = exposureMode;
this.width = width;
this.height = height;
this.pointsOfInterest2D = pointsOfInterest2D;
this.hasExposureCompensation = hasExposureCompensation;
this.exposureCompensation = exposureCompensation;
this.whiteBalanceMode = whiteBalanceMode;
this.iso = iso;
this.hasRedEyeReduction = hasRedEyeReduction;
this.redEyeReduction = redEyeReduction;
this.fillLightMode = fillLightMode;
this.hasTorch = hasTorch;
this.torch = torch;
this.colorTemperature = colorTemperature;
}
}
private class SetPhotoOptionsTask implements Runnable {
private final PhotoOptions mOptions;
public SetPhotoOptionsTask(PhotoOptions options) {
mOptions = options;
}
@Override
public void run() {
assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";
final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
final Rect canvas =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (mOptions.zoom != 0) {
final float normalizedZoom =
Math.max(1.0f, Math.min((float) mOptions.zoom, mMaxZoom));
final float cropFactor = (normalizedZoom - 1) / (2 * normalizedZoom);
mCropRect = new Rect(Math.round(canvas.width() * cropFactor),
Math.round(canvas.height() * cropFactor),
Math.round(canvas.width() * (1 - cropFactor)),
Math.round(canvas.height() * (1 - cropFactor)));
Log.d(TAG, "zoom level %f, rectangle: %s", normalizedZoom, mCropRect.toString());
}
if (mOptions.focusMode != AndroidMeteringMode.NOT_SET) mFocusMode = mOptions.focusMode;
if (mOptions.exposureMode != AndroidMeteringMode.NOT_SET)
mExposureMode = mOptions.exposureMode;
if (mOptions.whiteBalanceMode != AndroidMeteringMode.NOT_SET)
mWhiteBalanceMode = mOptions.whiteBalanceMode;
if (mOptions.width > 0) mPhotoWidth = (int) Math.round(mOptions.width);
if (mOptions.height > 0) mPhotoHeight = (int) Math.round(mOptions.height);
// Upon new |zoom| configuration, clear up the previous |mAreaOfInterest| if any.
if (mAreaOfInterest != null && !mAreaOfInterest.getRect().isEmpty()
&& mOptions.zoom > 0) {
mAreaOfInterest = null;
}
// Also clear |mAreaOfInterest| if the user sets it as NONE.
if (mFocusMode == AndroidMeteringMode.NONE
|| mExposureMode == AndroidMeteringMode.NONE) {
mAreaOfInterest = null;
}
// Update |mAreaOfInterest| if the camera supports and there are |pointsOfInterest2D|.
final boolean pointsOfInterestSupported =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) > 0
|| cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) > 0
|| cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB) > 0;
if (pointsOfInterestSupported && mOptions.pointsOfInterest2D.length > 0) {
assert mOptions.pointsOfInterest2D.length
== 2 : "Only 1 point of interest supported";
assert mOptions.pointsOfInterest2D[0] <= 1.0
&& mOptions.pointsOfInterest2D[0] >= 0.0;
assert mOptions.pointsOfInterest2D[1] <= 1.0
&& mOptions.pointsOfInterest2D[1] >= 0.0;
// Calculate a Rect of 1/8 the |visibleRect| dimensions, and center it w.r.t.
// |canvas|.
final Rect visibleRect = (mCropRect.isEmpty()) ? canvas : mCropRect;
int centerX = Math.round(mOptions.pointsOfInterest2D[0] * visibleRect.width());
int centerY = Math.round(mOptions.pointsOfInterest2D[1] * visibleRect.height());
if (visibleRect.equals(mCropRect)) {
centerX += (canvas.width() - visibleRect.width()) / 2;
centerY += (canvas.height() - visibleRect.height()) / 2;
}
final int regionWidth = visibleRect.width() / 8;
final int regionHeight = visibleRect.height() / 8;
mAreaOfInterest = new MeteringRectangle(Math.max(0, centerX - regionWidth / 2),
Math.max(0, centerY - regionHeight / 2), regionWidth, regionHeight,
MeteringRectangle.METERING_WEIGHT_MAX);
Log.d(TAG, "Calculating (%.2fx%.2f) wrt to %s (canvas being %s)",
mOptions.pointsOfInterest2D[0], mOptions.pointsOfInterest2D[1],
visibleRect.toString(), canvas.toString());
Log.d(TAG, "Area of interest %s", mAreaOfInterest.toString());
}
if (mOptions.hasExposureCompensation) {
mExposureCompensation = (int) Math.round(mOptions.exposureCompensation
/ cameraCharacteristics
.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP)
.floatValue());
}
if (mOptions.iso > 0) mIso = (int) Math.round(mOptions.iso);
if (mOptions.colorTemperature > 0)
mColorTemperature = (int) Math.round(mOptions.colorTemperature);
if (mOptions.hasRedEyeReduction) mRedEyeReduction = mOptions.redEyeReduction;
if (mOptions.fillLightMode != AndroidFillLightMode.NOT_SET)
mFillLightMode = mOptions.fillLightMode;
if (mOptions.hasTorch) mTorch = mOptions.torch;
if (mPreviewSession != null) {
assert mPreviewRequestBuilder != null : "preview request builder";
// Reuse most of |mPreviewRequestBuilder| since it has expensive items inside that
// have to do with preview, e.g. the ImageReader and its associated Surface.
configureCommonCaptureSettings(mPreviewRequestBuilder);
try {
mPreviewSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
} catch (CameraAccessException | SecurityException | IllegalStateException
| IllegalArgumentException ex) {
Log.e(TAG, "setRepeatingRequest: ", ex);
}
}
}
}
private class TakePhotoTask implements Runnable {
private final long mCallbackId;
public TakePhotoTask(long callbackId) {
mCallbackId = callbackId;
}
@Override
public void run() {
assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";
if (mCameraDevice == null || mCameraState != CameraState.STARTED) {
Log.e(TAG,
"TakePhoto failed because mCameraDevice == null || "
+ "mCameraState != CameraState.STARTED");
notifyTakePhotoError(mCallbackId);
return;
}
final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
final StreamConfigurationMap streamMap = cameraCharacteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final Size[] supportedSizes = streamMap.getOutputSizes(ImageFormat.JPEG);
final Size closestSize =
findClosestSizeInArray(supportedSizes, mPhotoWidth, mPhotoHeight);
Log.d(TAG, "requested resolution: (%dx%d)", mPhotoWidth, mPhotoHeight);
if (closestSize != null) {
Log.d(TAG, " matched (%dx%d)", closestSize.getWidth(), closestSize.getHeight());
}
final ImageReader imageReader = ImageReader.newInstance(
(closestSize != null) ? closestSize.getWidth() : mCaptureFormat.getWidth(),
(closestSize != null) ? closestSize.getHeight() : mCaptureFormat.getHeight(),
ImageFormat.JPEG, 1 /* maxImages */);
final CrPhotoReaderListener photoReaderListener =
new CrPhotoReaderListener(mCallbackId);
imageReader.setOnImageAvailableListener(photoReaderListener, mCameraThreadHandler);
final List<Surface> surfaceList = new ArrayList<Surface>(1);
// TODO(mcasas): release this Surface when not needed, https://crbug.com/643884.
surfaceList.add(imageReader.getSurface());
CaptureRequest.Builder photoRequestBuilder = null;
try {
photoRequestBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
} catch (CameraAccessException ex) {
Log.e(TAG, "createCaptureRequest() error ", ex);
notifyTakePhotoError(mCallbackId);
return;
}
if (photoRequestBuilder == null) {
Log.e(TAG, "photoRequestBuilder error");
notifyTakePhotoError(mCallbackId);
return;
}
photoRequestBuilder.addTarget(imageReader.getSurface());
photoRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION, getCameraRotation());
configureCommonCaptureSettings(photoRequestBuilder);
final CaptureRequest photoRequest = photoRequestBuilder.build();
final CrPhotoSessionListener sessionListener =
new CrPhotoSessionListener(imageReader, photoRequest, mCallbackId);
try {
mCameraDevice.createCaptureSession(
surfaceList, sessionListener, mCameraThreadHandler);
} catch (CameraAccessException | IllegalArgumentException | SecurityException ex) {
Log.e(TAG, "createCaptureSession: " + ex);
notifyTakePhotoError(mCallbackId);
}
}
}
    private static final double kNanosecondsPerSecond = 1000000000;
    private static final String TAG = "VideoCapture";
@@ -305,9 +774,13 @@ public class VideoCaptureCamera2 extends VideoCapture {
    private CameraCaptureSession mPreviewSession;
    private CaptureRequest mPreviewRequest;
    private CaptureRequest.Builder mPreviewRequestBuilder;
-   private Handler mMainHandler;
    private ImageReader mImageReader = null;
-   private final Looper mLooper;
+   // We create a dedicated HandlerThread for operating the camera on. This
+   // is needed because the camera APIs require a Looper for posting
+   // asynchronous callbacks to. The native thread that calls the constructor
+   // and public API cannot be used for this, because it does not have a
+   // Looper.
+   private Handler mCameraThreadHandler;
    private Range<Integer> mAeFpsRange;
    private CameraState mCameraState = CameraState.STOPPED;
@@ -340,24 +813,25 @@ public class VideoCaptureCamera2 extends VideoCapture {
        return null;
    }

-   // {@link nativeOnPhotoTaken()} needs to be called back if there's any
-   // problem after {@link takePhoto()} has returned true.
-   private void notifyTakePhotoError(long callbackId) {
-       nativeOnPhotoTaken(mNativeVideoCaptureDeviceAndroid, callbackId, new byte[0]);
-   }
+   private void createPreviewObjectsAndStartPreviewOrFail() {
+       assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";
+       if (createPreviewObjectsAndStartPreview()) return;
+       changeCameraStateAndNotify(CameraState.STOPPED);
+       nativeOnError(mNativeVideoCaptureDeviceAndroid, "Error starting or restarting preview");
+   }

    private boolean createPreviewObjectsAndStartPreview() {
        assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

        if (mCameraDevice == null) return false;

        // Create an ImageReader and plug a thread looper into it to have
        // readback take place on its own thread.
        mImageReader = ImageReader.newInstance(mCaptureFormat.getWidth(),
                mCaptureFormat.getHeight(), mCaptureFormat.getPixelFormat(), 2 /* maxImages */);
-       HandlerThread thread = new HandlerThread("CameraPreview");
-       thread.start();
-       final Handler backgroundHandler = new Handler(thread.getLooper());
        final CrPreviewReaderListener imageReaderListener = new CrPreviewReaderListener();
-       mImageReader.setOnImageAvailableListener(imageReaderListener, backgroundHandler);
+       mImageReader.setOnImageAvailableListener(imageReaderListener, mCameraThreadHandler);

        try {
            // TEMPLATE_PREVIEW specifically means "high frame rate is given
@@ -417,6 +891,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
    }

    private void configureCommonCaptureSettings(CaptureRequest.Builder requestBuilder) {
        assert mCameraThreadHandler.getLooper() == Looper.myLooper() : "called on wrong thread";

        final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);

        // |mFocusMode| indicates if we're in auto/continuous, single-shot or manual mode.
@@ -567,7 +1043,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
        return -1;
    }

-   private int getClosestWhiteBalance(int colorTemperature, int[] supportedTemperatures) {
+   private static int getClosestWhiteBalance(int colorTemperature, int[] supportedTemperatures) {
        int minDiff = Integer.MAX_VALUE;
        int matchedTemperature = -1;
@@ -583,14 +1059,14 @@ public class VideoCaptureCamera2 extends VideoCapture {
        return matchedTemperature;
    }

-   static boolean isLegacyDevice(int id) {
+   public static boolean isLegacyDevice(int id) {
        final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
        return cameraCharacteristics != null
                && cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
                == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
    }

-   static int getNumberOfCameras() {
+   public static int getNumberOfCameras() {
        CameraManager manager = null;
        try {
            manager = (CameraManager) ContextUtils.getApplicationContext().getSystemService(
@@ -609,7 +1085,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
        }
    }

-   static int getCaptureApiType(int id) {
+   public static int getCaptureApiType(int id) {
        final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
        if (cameraCharacteristics == null) {
            return VideoCaptureApi.UNKNOWN;
@@ -629,7 +1105,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
        }
    }

-   static int getFacingMode(int id) {
+   public static int getFacingMode(int id) {
        final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
        if (cameraCharacteristics == null) {
            return VideoFacingMode.MEDIA_VIDEO_FACING_NONE;
@@ -646,7 +1122,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
        }
    }

-   static String getName(int id) {
+   public static String getName(int id) {
        final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
        if (cameraCharacteristics == null) return null;
        final int facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
@@ -654,7 +1130,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
                + ((facing == CameraCharacteristics.LENS_FACING_FRONT) ? "front" : "back");
    }

-   static VideoCaptureFormat[] getDeviceSupportedFormats(int id) {
+   public static VideoCaptureFormat[] getDeviceSupportedFormats(int id) {
        final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
        if (cameraCharacteristics == null) return null;
@@ -699,7 +1175,13 @@ public class VideoCaptureCamera2 extends VideoCapture {
    VideoCaptureCamera2(int id, long nativeVideoCaptureDeviceAndroid) {
        super(id, nativeVideoCaptureDeviceAndroid);
-       mLooper = Looper.myLooper();
+       nativeDCheckCurrentlyOnIncomingTaskRunner(mNativeVideoCaptureDeviceAndroid);
+
+       HandlerThread thread = new HandlerThread("VideoCaptureCamera2_CameraThread");
+       thread.start();
+       mCameraThreadHandler = new Handler(thread.getLooper());

        final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
        if (cameraCharacteristics != null) {
            mMaxZoom = cameraCharacteristics.get(
@@ -707,10 +1189,15 @@ public class VideoCaptureCamera2 extends VideoCapture {
        }
    }

    @Override
    public void finalize() {
        mCameraThreadHandler.getLooper().quit();
    }

    @Override
    public boolean allocate(int width, int height, int frameRate) {
        Log.d(TAG, "allocate: requested (%d x %d) @%dfps", width, height, frameRate);
-       assert mLooper == Looper.myLooper() : "called on wrong thread";
+       nativeDCheckCurrentlyOnIncomingTaskRunner(mNativeVideoCaptureDeviceAndroid);
        synchronized (mCameraStateLock) {
            if (mCameraState == CameraState.OPENING || mCameraState == CameraState.CONFIGURING) {
                Log.e(TAG, "allocate() invoked while Camera is busy opening/configuring.");
@@ -765,26 +1252,17 @@ public class VideoCaptureCamera2 extends VideoCapture {
    }

    @Override
-   public boolean startCapture() {
-       assert mLooper == Looper.myLooper() : "called on wrong thread";
+   public boolean startCaptureMaybeAsync() {
+       nativeDCheckCurrentlyOnIncomingTaskRunner(mNativeVideoCaptureDeviceAndroid);

        changeCameraStateAndNotify(CameraState.OPENING);
        final CameraManager manager =
                (CameraManager) ContextUtils.getApplicationContext().getSystemService(
                        Context.CAMERA_SERVICE);
-       if (!mUseBackgroundThreadForTesting) {
-           mMainHandler = new Handler(ContextUtils.getApplicationContext().getMainLooper());
-       } else {
-           // Usually we deliver frames on application context thread, but unit tests
-           // occupy its Looper; deliver frames on a background thread instead.
-           HandlerThread thread = new HandlerThread("CameraPicture");
-           thread.start();
-           mMainHandler = new Handler(thread.getLooper());
-       }

        final CrStateListener stateListener = new CrStateListener();
        try {
-           manager.openCamera(Integer.toString(mId), stateListener, mMainHandler);
+           manager.openCamera(Integer.toString(mId), stateListener, mCameraThreadHandler);
        } catch (CameraAccessException | IllegalArgumentException | SecurityException ex) {
            Log.e(TAG, "allocate: manager.openCamera: ", ex);
            return false;
@@ -794,8 +1272,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
    }

    @Override
-   public boolean stopCapture() {
-       assert mLooper == Looper.myLooper() : "called on wrong thread";
+   public boolean stopCaptureAndBlockUntilStopped() {
+       nativeDCheckCurrentlyOnIncomingTaskRunner(mNativeVideoCaptureDeviceAndroid);

        // With Camera2 API, the capture is started asynchronously, which will cause problem if
        // stopCapture comes too quickly. Without stopping the previous capture properly, the next
@@ -811,214 +1289,17 @@ public class VideoCaptureCamera2 extends VideoCapture {
            if (mCameraState == CameraState.STOPPED) return true;
        }

-       try {
-           mPreviewSession.abortCaptures();
-       } catch (CameraAccessException | IllegalStateException ex) {
-           // Stopping a device whose CameraCaptureSession is closed is not an error: ignore this.
-           Log.w(TAG, "abortCaptures: ", ex);
-       }
-       if (mCameraDevice == null) return false;
-       mCameraDevice.close();
-       if (mUseBackgroundThreadForTesting) mMainHandler.getLooper().quit();
-       changeCameraStateAndNotify(CameraState.STOPPED);
-       mCropRect = new Rect();
+       ConditionVariable doneConditionVariable = new ConditionVariable();
+       mCameraThreadHandler.post(new StopCaptureTask(doneConditionVariable));
+       doneConditionVariable.block();

        return true;
    }

    @Override
-   public PhotoCapabilities getPhotoCapabilities() {
-       assert mLooper == Looper.myLooper() : "called on wrong thread";
-       final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
+   public void getPhotoCapabilitiesAsync(long callbackId) {
+       nativeDCheckCurrentlyOnIncomingTaskRunner(mNativeVideoCaptureDeviceAndroid);
+       mCameraThreadHandler.post(new GetPhotoCapabilitiesTask(callbackId));
PhotoCapabilities.Builder builder = new PhotoCapabilities.Builder();
int minIso = 0;
int maxIso = 0;
final Range<Integer> iso_range =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
if (iso_range != null) {
minIso = iso_range.getLower();
maxIso = iso_range.getUpper();
}
builder.setMinIso(minIso).setMaxIso(maxIso).setStepIso(1);
if (mPreviewRequestBuilder.get(CaptureRequest.SENSOR_SENSITIVITY) != null) {
builder.setCurrentIso(mPreviewRequest.get(CaptureRequest.SENSOR_SENSITIVITY));
}
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final Size[] supportedSizes = streamMap.getOutputSizes(ImageFormat.JPEG);
int minWidth = Integer.MAX_VALUE;
int minHeight = Integer.MAX_VALUE;
int maxWidth = 0;
int maxHeight = 0;
for (Size size : supportedSizes) {
if (size.getWidth() < minWidth) minWidth = size.getWidth();
if (size.getHeight() < minHeight) minHeight = size.getHeight();
if (size.getWidth() > maxWidth) maxWidth = size.getWidth();
if (size.getHeight() > maxHeight) maxHeight = size.getHeight();
}
builder.setMinHeight(minHeight).setMaxHeight(maxHeight).setStepHeight(1);
builder.setMinWidth(minWidth).setMaxWidth(maxWidth).setStepWidth(1);
builder.setCurrentHeight((mPhotoHeight > 0) ? mPhotoHeight : mCaptureFormat.getHeight());
builder.setCurrentWidth((mPhotoWidth > 0) ? mPhotoWidth : mCaptureFormat.getWidth());
final float currentZoom =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)
.width()
/ (float) mPreviewRequest.get(CaptureRequest.SCALER_CROP_REGION).width();
// There is no min-zoom per se, so clamp it to always 1.
builder.setMinZoom(1.0).setMaxZoom(mMaxZoom);
builder.setCurrentZoom(currentZoom).setStepZoom(0.1);
// Classify the Focus capabilities. In CONTINUOUS and SINGLE_SHOT, we can call
// autoFocus(AutoFocusCallback) to configure region(s) to focus onto.
final int[] jniFocusModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
ArrayList<Integer> focusModes = new ArrayList<Integer>(3);
for (int mode : jniFocusModes) {
if (mode == CameraMetadata.CONTROL_AF_MODE_OFF) {
focusModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
} else if (mode == CameraMetadata.CONTROL_AF_MODE_AUTO
|| mode == CameraMetadata.CONTROL_AF_MODE_MACRO) {
// CONTROL_AF_MODE_{AUTO,MACRO} do not imply continuously focusing.
if (!focusModes.contains(Integer.valueOf(AndroidMeteringMode.SINGLE_SHOT))) {
focusModes.add(Integer.valueOf(AndroidMeteringMode.SINGLE_SHOT));
}
} else if (mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_VIDEO
|| mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE
|| mode == CameraMetadata.CONTROL_AF_MODE_EDOF) {
if (!focusModes.contains(Integer.valueOf(AndroidMeteringMode.CONTINUOUS))) {
focusModes.add(Integer.valueOf(AndroidMeteringMode.CONTINUOUS));
}
}
}
builder.setFocusModes(integerArrayListToArray(focusModes));
final int focusMode = mPreviewRequest.get(CaptureRequest.CONTROL_AF_MODE);
int jniFocusMode = AndroidMeteringMode.NONE;
if (focusMode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_VIDEO
|| focusMode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
jniFocusMode = AndroidMeteringMode.CONTINUOUS;
} else if (focusMode == CameraMetadata.CONTROL_AF_MODE_AUTO
|| focusMode == CameraMetadata.CONTROL_AF_MODE_MACRO) {
jniFocusMode = AndroidMeteringMode.SINGLE_SHOT;
} else if (focusMode == CameraMetadata.CONTROL_AF_MODE_OFF) {
jniFocusMode = AndroidMeteringMode.FIXED;
} else {
assert jniFocusMode == CameraMetadata.CONTROL_AF_MODE_EDOF;
}
builder.setFocusMode(jniFocusMode);
// Auto Exposure is the usual capability and state, unless AE is not available at all, which
// is signalled by an empty CONTROL_AE_AVAILABLE_MODES list. Exposure Compensation can also
// support or be locked, this is equivalent to AndroidMeteringMode.FIXED.
final int[] jniExposureModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
ArrayList<Integer> exposureModes = new ArrayList<Integer>(1);
for (int mode : jniExposureModes) {
if (mode == CameraMetadata.CONTROL_AE_MODE_ON
|| mode == CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH
|| mode == CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| mode == CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
exposureModes.add(Integer.valueOf(AndroidMeteringMode.CONTINUOUS));
break;
}
}
try {
if (cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_LOCK_AVAILABLE)) {
exposureModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
}
} catch (NoSuchFieldError e) {
// Ignore this exception, it means CONTROL_AE_LOCK_AVAILABLE is not known.
}
builder.setExposureModes(integerArrayListToArray(exposureModes));
int jniExposureMode = AndroidMeteringMode.CONTINUOUS;
if (mPreviewRequest.get(CaptureRequest.CONTROL_AE_MODE)
== CameraMetadata.CONTROL_AE_MODE_OFF) {
jniExposureMode = AndroidMeteringMode.NONE;
}
if (mPreviewRequest.get(CaptureRequest.CONTROL_AE_LOCK)) {
jniExposureMode = AndroidMeteringMode.FIXED;
}
builder.setExposureMode(jniExposureMode);
final float step =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP)
.floatValue();
builder.setStepExposureCompensation(step);
final Range<Integer> exposureCompensationRange =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
builder.setMinExposureCompensation(exposureCompensationRange.getLower() * step);
builder.setMaxExposureCompensation(exposureCompensationRange.getUpper() * step);
builder.setCurrentExposureCompensation(
mPreviewRequest.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION) * step);
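// For illustration: if CONTROL_AE_COMPENSATION_STEP were 1/3 EV and
// CONTROL_AE_COMPENSATION_RANGE were [-9, 9] (in step units), the values
// reported above would be min -3.0 EV, max +3.0 EV, step ~0.33 EV.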
final int[] jniWhiteBalanceMode =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
ArrayList<Integer> whiteBalanceModes = new ArrayList<Integer>(1);
for (int mode : jniWhiteBalanceMode) {
if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO) {
whiteBalanceModes.add(Integer.valueOf(AndroidMeteringMode.CONTINUOUS));
break;
}
}
try {
if (cameraCharacteristics.get(CameraCharacteristics.CONTROL_AWB_LOCK_AVAILABLE)) {
whiteBalanceModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
}
} catch (NoSuchFieldError e) {
// Ignore this exception, it means CONTROL_AWB_LOCK_AVAILABLE is not known.
}
builder.setWhiteBalanceModes(integerArrayListToArray(whiteBalanceModes));
final int whiteBalanceMode = mPreviewRequest.get(CaptureRequest.CONTROL_AWB_MODE);
if (whiteBalanceMode == CameraMetadata.CONTROL_AWB_MODE_OFF) {
builder.setWhiteBalanceMode(AndroidMeteringMode.NONE);
} else if (whiteBalanceMode == CameraMetadata.CONTROL_AWB_MODE_AUTO) {
builder.setWhiteBalanceMode(AndroidMeteringMode.CONTINUOUS);
} else {
builder.setWhiteBalanceMode(AndroidMeteringMode.FIXED);
}
builder.setMinColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(0));
builder.setMaxColorTemperature(
COLOR_TEMPERATURES_MAP.keyAt(COLOR_TEMPERATURES_MAP.size() - 1));
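// COLOR_TEMPERATURES_MAP is a SparseIntArray keyed by temperature in Kelvin,
// and SparseIntArray keeps its keys sorted, so keyAt(0) and keyAt(size() - 1)
// above are the lowest and highest supported temperatures. indexOfValue()
// below recovers the temperature whose AWB mode matches the current one,
// if any.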
final int index = COLOR_TEMPERATURES_MAP.indexOfValue(whiteBalanceMode);
if (index >= 0) {
builder.setCurrentColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(index));
}
builder.setStepColorTemperature(50);
if (!cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) {
builder.setSupportsTorch(false);
builder.setRedEyeReduction(false);
} else {
// There's no way to query if torch and/or red eye reduction modes are available using
// Camera2 API but since there's a Flash unit, we assume so.
builder.setSupportsTorch(true);
builder.setTorch(mPreviewRequest.get(CaptureRequest.FLASH_MODE)
== CameraMetadata.FLASH_MODE_TORCH);
builder.setRedEyeReduction(true);
final int[] flashModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
ArrayList<Integer> modes = new ArrayList<Integer>(0);
for (int flashMode : flashModes) {
if (flashMode == CameraMetadata.CONTROL_AE_MODE_OFF) {
modes.add(Integer.valueOf(AndroidFillLightMode.OFF));
} else if (flashMode == CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH) {
modes.add(Integer.valueOf(AndroidFillLightMode.AUTO));
} else if (flashMode == CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH) {
modes.add(Integer.valueOf(AndroidFillLightMode.FLASH));
}
}
builder.setFillLightModes(integerArrayListToArray(modes));
}
return builder.build();
}
@Override
...@@ -1027,142 +1308,17 @@ public class VideoCaptureCamera2 extends VideoCapture {
double exposureCompensation, int whiteBalanceMode, double iso,
boolean hasRedEyeReduction, boolean redEyeReduction, int fillLightMode,
boolean hasTorch, boolean torch, double colorTemperature) {
nativeDCheckCurrentlyOnIncomingTaskRunner(mNativeVideoCaptureDeviceAndroid);
mCameraThreadHandler.post(new SetPhotoOptionsTask(new PhotoOptions(zoom, focusMode,
exposureMode, width, height, pointsOfInterest2D, hasExposureCompensation,
exposureCompensation, whiteBalanceMode, iso, hasRedEyeReduction, redEyeReduction,
fillLightMode, hasTorch, torch, colorTemperature)));
assert mLooper == Looper.myLooper() : "called on wrong thread";
final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
final Rect canvas =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (zoom != 0) {
final float normalizedZoom = Math.max(1.0f, Math.min((float) zoom, mMaxZoom));
final float cropFactor = (normalizedZoom - 1) / (2 * normalizedZoom);
mCropRect = new Rect(Math.round(canvas.width() * cropFactor),
Math.round(canvas.height() * cropFactor),
Math.round(canvas.width() * (1 - cropFactor)),
Math.round(canvas.height() * (1 - cropFactor)));
Log.d(TAG, "zoom level %f, rectangle: %s", normalizedZoom, mCropRect.toString());
}
if (focusMode != AndroidMeteringMode.NOT_SET) mFocusMode = focusMode;
if (exposureMode != AndroidMeteringMode.NOT_SET) mExposureMode = exposureMode;
if (whiteBalanceMode != AndroidMeteringMode.NOT_SET) mWhiteBalanceMode = whiteBalanceMode;
if (width > 0) mPhotoWidth = (int) Math.round(width);
if (height > 0) mPhotoHeight = (int) Math.round(height);
// Upon new |zoom| configuration, clear up the previous |mAreaOfInterest| if any.
if (mAreaOfInterest != null && !mAreaOfInterest.getRect().isEmpty() && zoom > 0) {
mAreaOfInterest = null;
}
// Also clear |mAreaOfInterest| if the user sets it as NONE.
if (mFocusMode == AndroidMeteringMode.NONE || mExposureMode == AndroidMeteringMode.NONE) {
mAreaOfInterest = null;
}
// Update |mAreaOfInterest| if the camera supports and there are |pointsOfInterest2D|.
final boolean pointsOfInterestSupported =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) > 0
|| cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) > 0
|| cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB) > 0;
if (pointsOfInterestSupported && pointsOfInterest2D.length > 0) {
assert pointsOfInterest2D.length == 2 : "Only 1 point of interest supported";
assert pointsOfInterest2D[0] <= 1.0 && pointsOfInterest2D[0] >= 0.0;
assert pointsOfInterest2D[1] <= 1.0 && pointsOfInterest2D[1] >= 0.0;
// Calculate a Rect of 1/8 the |visibleRect| dimensions, and center it w.r.t. |canvas|.
final Rect visibleRect = (mCropRect.isEmpty()) ? canvas : mCropRect;
int centerX = Math.round(pointsOfInterest2D[0] * visibleRect.width());
int centerY = Math.round(pointsOfInterest2D[1] * visibleRect.height());
if (visibleRect.equals(mCropRect)) {
centerX += (canvas.width() - visibleRect.width()) / 2;
centerY += (canvas.height() - visibleRect.height()) / 2;
}
final int regionWidth = visibleRect.width() / 8;
final int regionHeight = visibleRect.height() / 8;
mAreaOfInterest = new MeteringRectangle(Math.max(0, centerX - regionWidth / 2),
Math.max(0, centerY - regionHeight / 2), regionWidth, regionHeight,
MeteringRectangle.METERING_WEIGHT_MAX);
Log.d(TAG, "Calculating (%.2fx%.2f) wrt to %s (canvas being %s)", pointsOfInterest2D[0],
pointsOfInterest2D[1], visibleRect.toString(), canvas.toString());
Log.d(TAG, "Area of interest %s", mAreaOfInterest.toString());
}
if (hasExposureCompensation) {
mExposureCompensation = (int) Math.round(exposureCompensation
/ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP)
.floatValue());
}
if (iso > 0) mIso = (int) Math.round(iso);
if (colorTemperature > 0) mColorTemperature = (int) Math.round(colorTemperature);
if (hasRedEyeReduction) mRedEyeReduction = redEyeReduction;
if (fillLightMode != AndroidFillLightMode.NOT_SET) mFillLightMode = fillLightMode;
if (hasTorch) mTorch = torch;
final Handler mainHandler =
new Handler(ContextUtils.getApplicationContext().getMainLooper());
mainHandler.removeCallbacks(mReconfigureCaptureTask);
mainHandler.post(mReconfigureCaptureTask);
}
@Override
public void takePhotoAsync(long callbackId) {
nativeDCheckCurrentlyOnIncomingTaskRunner(mNativeVideoCaptureDeviceAndroid);
mCameraThreadHandler.post(new TakePhotoTask(callbackId));
}
public boolean takePhoto(final long callbackId) {
assert mLooper == Looper.myLooper() : "called on wrong thread";
if (mCameraDevice == null || mCameraState != CameraState.STARTED) return false;
final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final Size[] supportedSizes = streamMap.getOutputSizes(ImageFormat.JPEG);
final Size closestSize = findClosestSizeInArray(supportedSizes, mPhotoWidth, mPhotoHeight);
Log.d(TAG, "requested resolution: (%dx%d)", mPhotoWidth, mPhotoHeight);
if (closestSize != null) {
Log.d(TAG, " matched (%dx%d)", closestSize.getWidth(), closestSize.getHeight());
}
final ImageReader imageReader = ImageReader.newInstance(
(closestSize != null) ? closestSize.getWidth() : mCaptureFormat.getWidth(),
(closestSize != null) ? closestSize.getHeight() : mCaptureFormat.getHeight(),
ImageFormat.JPEG, 1 /* maxImages */);
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroundHandler = new Handler(thread.getLooper());
final CrPhotoReaderListener photoReaderListener = new CrPhotoReaderListener(callbackId);
imageReader.setOnImageAvailableListener(photoReaderListener, backgroundHandler);
final List<Surface> surfaceList = new ArrayList<Surface>(1);
// TODO(mcasas): release this Surface when not needed, https://crbug.com/643884.
surfaceList.add(imageReader.getSurface());
CaptureRequest.Builder photoRequestBuilder = null;
try {
photoRequestBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
} catch (CameraAccessException ex) {
Log.e(TAG, "createCaptureRequest() error ", ex);
return false;
}
if (photoRequestBuilder == null) {
Log.e(TAG, "photoRequestBuilder error");
return false;
}
photoRequestBuilder.addTarget(imageReader.getSurface());
photoRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION, getCameraRotation());
configureCommonCaptureSettings(photoRequestBuilder);
final CaptureRequest photoRequest = photoRequestBuilder.build();
final CrPhotoSessionListener sessionListener =
new CrPhotoSessionListener(imageReader, photoRequest, callbackId);
try {
mCameraDevice.createCaptureSession(surfaceList, sessionListener, backgroundHandler);
} catch (CameraAccessException | IllegalArgumentException | SecurityException ex) {
Log.e(TAG, "createCaptureSession: " + ex);
return false;
}
return true;
}
@Override
...
...@@ -161,7 +161,7 @@ void VideoCaptureDeviceAndroid::AllocateAndStart(
<< capture_format_.frame_size.ToString() << ")@ "
<< capture_format_.frame_rate << "fps";
ret = Java_VideoCapture_startCaptureMaybeAsync(env, j_capture_);
if (!ret) {
SetErrorState(FROM_HERE, "failed to start capture");
return;
...@@ -183,7 +183,8 @@ void VideoCaptureDeviceAndroid::StopAndDeAllocate() {
JNIEnv* env = AttachCurrentThread();
const jboolean ret =
Java_VideoCapture_stopCaptureAndBlockUntilStopped(env, j_capture_);
if (!ret) {
SetErrorState(FROM_HERE, "failed to stop capture");
return;
...@@ -344,6 +345,115 @@ void VideoCaptureDeviceAndroid::OnError(JNIEnv* env,
base::android::ConvertJavaStringToUTF8(env, message));
}
void VideoCaptureDeviceAndroid::OnGetPhotoCapabilitiesReply(
JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj,
jlong callback_id,
jobject result) {
base::AutoLock lock(photo_callbacks_lock_);
GetPhotoStateCallback* const cb =
reinterpret_cast<GetPhotoStateCallback*>(callback_id);
// Search for the pointer |cb| in the list of |get_photo_state_callbacks_|.
const auto reference_it = std::find_if(
get_photo_state_callbacks_.begin(), get_photo_state_callbacks_.end(),
[cb](const std::unique_ptr<GetPhotoStateCallback>& callback) {
return callback.get() == cb;
});
if (reference_it == get_photo_state_callbacks_.end()) {
NOTREACHED() << "|callback_id| not found.";
return;
}
base::android::ScopedJavaLocalRef<jobject> scoped_photo_capabilities(env,
result);
PhotoCapabilities caps(scoped_photo_capabilities);
// TODO(mcasas): Manual member copying sucks, consider adding typemapping from
// PhotoCapabilities to mojom::PhotoStatePtr, https://crbug.com/622002.
mojom::PhotoStatePtr photo_capabilities = mojom::PhotoState::New();
const auto jni_white_balance_modes = caps.getWhiteBalanceModes();
std::vector<mojom::MeteringMode> white_balance_modes;
for (const auto& white_balance_mode : jni_white_balance_modes)
white_balance_modes.push_back(ToMojomMeteringMode(white_balance_mode));
photo_capabilities->supported_white_balance_modes = white_balance_modes;
photo_capabilities->current_white_balance_mode =
ToMojomMeteringMode(caps.getWhiteBalanceMode());
const auto jni_exposure_modes = caps.getExposureModes();
std::vector<mojom::MeteringMode> exposure_modes;
for (const auto& exposure_mode : jni_exposure_modes)
exposure_modes.push_back(ToMojomMeteringMode(exposure_mode));
photo_capabilities->supported_exposure_modes = exposure_modes;
photo_capabilities->current_exposure_mode =
ToMojomMeteringMode(caps.getExposureMode());
const auto jni_focus_modes = caps.getFocusModes();
std::vector<mojom::MeteringMode> focus_modes;
for (const auto& focus_mode : jni_focus_modes)
focus_modes.push_back(ToMojomMeteringMode(focus_mode));
photo_capabilities->supported_focus_modes = focus_modes;
photo_capabilities->current_focus_mode =
ToMojomMeteringMode(caps.getFocusMode());
photo_capabilities->exposure_compensation = mojom::Range::New();
photo_capabilities->exposure_compensation->current =
caps.getCurrentExposureCompensation();
photo_capabilities->exposure_compensation->max =
caps.getMaxExposureCompensation();
photo_capabilities->exposure_compensation->min =
caps.getMinExposureCompensation();
photo_capabilities->exposure_compensation->step =
caps.getStepExposureCompensation();
photo_capabilities->color_temperature = mojom::Range::New();
photo_capabilities->color_temperature->current =
caps.getCurrentColorTemperature();
photo_capabilities->color_temperature->max = caps.getMaxColorTemperature();
photo_capabilities->color_temperature->min = caps.getMinColorTemperature();
photo_capabilities->color_temperature->step = caps.getStepColorTemperature();
photo_capabilities->iso = mojom::Range::New();
photo_capabilities->iso->current = caps.getCurrentIso();
photo_capabilities->iso->max = caps.getMaxIso();
photo_capabilities->iso->min = caps.getMinIso();
photo_capabilities->iso->step = caps.getStepIso();
photo_capabilities->brightness = mojom::Range::New();
photo_capabilities->contrast = mojom::Range::New();
photo_capabilities->saturation = mojom::Range::New();
photo_capabilities->sharpness = mojom::Range::New();
photo_capabilities->zoom = mojom::Range::New();
photo_capabilities->zoom->current = caps.getCurrentZoom();
photo_capabilities->zoom->max = caps.getMaxZoom();
photo_capabilities->zoom->min = caps.getMinZoom();
photo_capabilities->zoom->step = caps.getStepZoom();
photo_capabilities->supports_torch = caps.getSupportsTorch();
photo_capabilities->torch = caps.getTorch();
photo_capabilities->red_eye_reduction =
caps.getRedEyeReduction() ? mojom::RedEyeReduction::CONTROLLABLE
: mojom::RedEyeReduction::NEVER;
photo_capabilities->height = mojom::Range::New();
photo_capabilities->height->current = caps.getCurrentHeight();
photo_capabilities->height->max = caps.getMaxHeight();
photo_capabilities->height->min = caps.getMinHeight();
photo_capabilities->height->step = caps.getStepHeight();
photo_capabilities->width = mojom::Range::New();
photo_capabilities->width->current = caps.getCurrentWidth();
photo_capabilities->width->max = caps.getMaxWidth();
photo_capabilities->width->min = caps.getMinWidth();
photo_capabilities->width->step = caps.getStepWidth();
const auto fill_light_modes = caps.getFillLightModes();
std::vector<mojom::FillLightMode> modes;
for (const auto& fill_light_mode : fill_light_modes)
modes.push_back(ToMojomFillLightMode(fill_light_mode));
photo_capabilities->fill_light_mode = modes;
std::move(*cb).Run(std::move(photo_capabilities));
get_photo_state_callbacks_.erase(reference_it);
}
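The reply handler above is one half of a pointer-as-id round trip: DoGetPhotoState() heap-allocates the mojo callback, hands its address to Java as a jlong, and this function casts the id back, looks it up in the list, runs it and erases it. Distilled to its essentials, the pattern looks roughly like the sketch below; std::mutex and std::function stand in for base::Lock and the mojo callback type, and every name here is illustrative rather than Chromium API.

#include <algorithm>
#include <cstdint>
#include <functional>
#include <list>
#include <memory>
#include <mutex>

// Stand-in for the mojo callback type; any move-only callable also works.
using PhotoCallback = std::function<void(int /*result*/)>;

class CallbackRegistry {
 public:
  // Heap-allocates the callback and returns its address as an opaque id that
  // fits in a Java jlong. Must complete before the JNI call that will
  // eventually echo the id back.
  intptr_t Register(PhotoCallback callback) {
    auto owned = std::make_unique<PhotoCallback>(std::move(callback));
    const intptr_t id = reinterpret_cast<intptr_t>(owned.get());
    std::lock_guard<std::mutex> lock(lock_);
    callbacks_.push_back(std::move(owned));
    return id;
  }

  // Reply path: recovers the callback from the id, runs it and erases it.
  // Returns false if the id is unknown (stale or duplicate reply).
  bool RunAndErase(intptr_t id, int result) {
    std::unique_ptr<PhotoCallback> found;
    {
      std::lock_guard<std::mutex> lock(lock_);
      auto it = std::find_if(callbacks_.begin(), callbacks_.end(),
                             [id](const std::unique_ptr<PhotoCallback>& cb) {
                               return reinterpret_cast<intptr_t>(cb.get()) == id;
                             });
      if (it == callbacks_.end()) return false;
      found = std::move(*it);
      callbacks_.erase(it);
    }
    (*found)(result);  // Run outside the lock.
    return true;
  }

 private:
  std::mutex lock_;
  std::list<std::unique_ptr<PhotoCallback>> callbacks_;
};

Because the list is the sole owner of each callback, an id that fails the lookup is simply dropped, so a late or duplicated reply cannot double-run or double-free anything.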
void VideoCaptureDeviceAndroid::OnPhotoTaken(
JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj,
...@@ -355,23 +465,25 @@ void VideoCaptureDeviceAndroid::OnPhotoTaken(
TakePhotoCallback* const cb =
reinterpret_cast<TakePhotoCallback*>(callback_id);
// Search for the pointer |cb| in the list of |take_photo_callbacks_|.
const auto reference_it =
std::find_if(take_photo_callbacks_.begin(), take_photo_callbacks_.end(),
[cb](const std::unique_ptr<TakePhotoCallback>& callback) {
return callback.get() == cb;
});
if (reference_it == take_photo_callbacks_.end()) {
NOTREACHED() << "|callback_id| not found.";
return;
}
if (data != nullptr) {
mojom::BlobPtr blob = mojom::Blob::New();
base::android::JavaByteArrayToByteVector(env, data.obj(), &blob->data);
blob->mime_type = blob->data.empty() ? "" : "image/jpeg";
std::move(*cb).Run(std::move(blob));
}
take_photo_callbacks_.erase(reference_it);
}
void VideoCaptureDeviceAndroid::OnStarted(JNIEnv* env,
...@@ -380,6 +492,12 @@ void VideoCaptureDeviceAndroid::OnStarted(JNIEnv* env,
client_->OnStarted();
}
void VideoCaptureDeviceAndroid::DCheckCurrentlyOnIncomingTaskRunner(
JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
}
void VideoCaptureDeviceAndroid::ConfigureForTesting() {
Java_VideoCapture_setTestMode(AttachCurrentThread(), j_capture_);
}
...@@ -466,15 +584,11 @@ void VideoCaptureDeviceAndroid::DoTakePhoto(TakePhotoCallback callback) {
std::unique_ptr<TakePhotoCallback> heap_callback(
new TakePhotoCallback(std::move(callback)));
const intptr_t callback_id = reinterpret_cast<intptr_t>(heap_callback.get());
// We need lock here because asynchronous response to
// Java_VideoCapture_takePhoto(), i.e. a call to OnPhotoTaken, arrives from a
// separate thread, and it can arrive before |photo_callbacks_.push_back()|
// has executed.
base::AutoLock lock(photo_callbacks_lock_);
if (Java_VideoCapture_takePhoto(env, j_capture_, callback_id)) {
photo_callbacks_.push_back(std::move(heap_callback));
}
{
base::AutoLock lock(photo_callbacks_lock_);
take_photo_callbacks_.push_back(std::move(heap_callback));
}
Java_VideoCapture_takePhotoAsync(env, j_capture_, callback_id);
}
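The push_back-before-call ordering in the new code matters for exactly the reason the removed comment gave: the asynchronous reply can arrive on another thread before the JNI call returns, so the callback must already be registered when it does. A hypothetical request path on top of the CallbackRegistry sketch above (FakeJavaTakePhotoAsync is a stand-in for the generated Java_VideoCapture_takePhotoAsync stub, not a real symbol):

// Assumed to eventually trigger registry.RunAndErase(callback_id, ...) from
// some other thread; declared here only to make the sketch self-contained.
void FakeJavaTakePhotoAsync(intptr_t callback_id);

void DoTakePhotoSketch(CallbackRegistry& registry, PhotoCallback callback) {
  // Register first; from the moment the JNI call is issued, the reply may
  // race with this function and must already find the id in the registry.
  const intptr_t callback_id = registry.Register(std::move(callback));
  FakeJavaTakePhotoAsync(callback_id);
}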
void VideoCaptureDeviceAndroid::DoGetPhotoState(
...@@ -489,92 +603,15 @@ void VideoCaptureDeviceAndroid::DoGetPhotoState(
#endif
JNIEnv* env = AttachCurrentThread();
// Make copy on the heap so we can pass the pointer through JNI.
std::unique_ptr<GetPhotoStateCallback> heap_callback(
new GetPhotoStateCallback(std::move(callback)));
const intptr_t callback_id = reinterpret_cast<intptr_t>(heap_callback.get());
{
base::AutoLock lock(photo_callbacks_lock_);
get_photo_state_callbacks_.push_back(std::move(heap_callback));
}
Java_VideoCapture_getPhotoCapabilitiesAsync(env, j_capture_, callback_id);
PhotoCapabilities caps(
Java_VideoCapture_getPhotoCapabilities(env, j_capture_));
// TODO(mcasas): Manual member copying sucks, consider adding typemapping from
// PhotoCapabilities to mojom::PhotoStatePtr, https://crbug.com/622002.
mojom::PhotoStatePtr photo_capabilities = mojom::PhotoState::New();
const auto jni_white_balance_modes = caps.getWhiteBalanceModes();
std::vector<mojom::MeteringMode> white_balance_modes;
for (const auto& white_balance_mode : jni_white_balance_modes)
white_balance_modes.push_back(ToMojomMeteringMode(white_balance_mode));
photo_capabilities->supported_white_balance_modes = white_balance_modes;
photo_capabilities->current_white_balance_mode =
ToMojomMeteringMode(caps.getWhiteBalanceMode());
const auto jni_exposure_modes = caps.getExposureModes();
std::vector<mojom::MeteringMode> exposure_modes;
for (const auto& exposure_mode : jni_exposure_modes)
exposure_modes.push_back(ToMojomMeteringMode(exposure_mode));
photo_capabilities->supported_exposure_modes = exposure_modes;
photo_capabilities->current_exposure_mode =
ToMojomMeteringMode(caps.getExposureMode());
const auto jni_focus_modes = caps.getFocusModes();
std::vector<mojom::MeteringMode> focus_modes;
for (const auto& focus_mode : jni_focus_modes)
focus_modes.push_back(ToMojomMeteringMode(focus_mode));
photo_capabilities->supported_focus_modes = focus_modes;
photo_capabilities->current_focus_mode =
ToMojomMeteringMode(caps.getFocusMode());
photo_capabilities->exposure_compensation = mojom::Range::New();
photo_capabilities->exposure_compensation->current =
caps.getCurrentExposureCompensation();
photo_capabilities->exposure_compensation->max =
caps.getMaxExposureCompensation();
photo_capabilities->exposure_compensation->min =
caps.getMinExposureCompensation();
photo_capabilities->exposure_compensation->step =
caps.getStepExposureCompensation();
photo_capabilities->color_temperature = mojom::Range::New();
photo_capabilities->color_temperature->current =
caps.getCurrentColorTemperature();
photo_capabilities->color_temperature->max = caps.getMaxColorTemperature();
photo_capabilities->color_temperature->min = caps.getMinColorTemperature();
photo_capabilities->color_temperature->step = caps.getStepColorTemperature();
photo_capabilities->iso = mojom::Range::New();
photo_capabilities->iso->current = caps.getCurrentIso();
photo_capabilities->iso->max = caps.getMaxIso();
photo_capabilities->iso->min = caps.getMinIso();
photo_capabilities->iso->step = caps.getStepIso();
photo_capabilities->brightness = mojom::Range::New();
photo_capabilities->contrast = mojom::Range::New();
photo_capabilities->saturation = mojom::Range::New();
photo_capabilities->sharpness = mojom::Range::New();
photo_capabilities->zoom = mojom::Range::New();
photo_capabilities->zoom->current = caps.getCurrentZoom();
photo_capabilities->zoom->max = caps.getMaxZoom();
photo_capabilities->zoom->min = caps.getMinZoom();
photo_capabilities->zoom->step = caps.getStepZoom();
photo_capabilities->supports_torch = caps.getSupportsTorch();
photo_capabilities->torch = caps.getTorch();
photo_capabilities->red_eye_reduction =
caps.getRedEyeReduction() ? mojom::RedEyeReduction::CONTROLLABLE
: mojom::RedEyeReduction::NEVER;
photo_capabilities->height = mojom::Range::New();
photo_capabilities->height->current = caps.getCurrentHeight();
photo_capabilities->height->max = caps.getMaxHeight();
photo_capabilities->height->min = caps.getMinHeight();
photo_capabilities->height->step = caps.getStepHeight();
photo_capabilities->width = mojom::Range::New();
photo_capabilities->width->current = caps.getCurrentWidth();
photo_capabilities->width->max = caps.getMaxWidth();
photo_capabilities->width->min = caps.getMinWidth();
photo_capabilities->width->step = caps.getStepWidth();
const auto fill_light_modes = caps.getFillLightModes();
std::vector<mojom::FillLightMode> modes;
for (const auto& fill_light_mode : fill_light_modes)
modes.push_back(ToMojomFillLightMode(fill_light_mode));
photo_capabilities->fill_light_mode = modes;
std::move(callback).Run(std::move(photo_capabilities));
}
void VideoCaptureDeviceAndroid::DoSetPhotoOptions(
...
...@@ -21,7 +21,7 @@
namespace base {
class Location;
class SingleThreadTaskRunner;
}  // namespace base
namespace media {
...@@ -91,6 +91,12 @@ class CAPTURE_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
const base::android::JavaParamRef<jobject>& obj,
const base::android::JavaParamRef<jstring>& message);
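// Implement
// org.chromium.media.VideoCapture.nativeOnGetPhotoCapabilitiesReply.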
void OnGetPhotoCapabilitiesReply(
JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj,
jlong callback_id,
jobject photo_capabilities);
// Implement org.chromium.media.VideoCapture.nativeOnPhotoTaken.
void OnPhotoTaken(JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj,
...@@ -100,6 +106,12 @@ class CAPTURE_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
// Implement org.chromium.media.VideoCapture.nativeOnStarted.
void OnStarted(JNIEnv* env, const base::android::JavaParamRef<jobject>& obj);
// Implement
// org.chromium.media.VideoCapture.nativeDCheckCurrentlyOnIncomingTaskRunner.
void DCheckCurrentlyOnIncomingTaskRunner(
JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj);
void ConfigureForTesting();
protected:
...@@ -152,9 +164,10 @@ class CAPTURE_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
base::TimeTicks expected_next_frame_time_;
base::TimeDelta frame_interval_;
// List of callbacks for the photo API in flight, being served on the Java
// side.
base::Lock photo_callbacks_lock_;
std::list<std::unique_ptr<GetPhotoStateCallback>> get_photo_state_callbacks_;
std::list<std::unique_ptr<TakePhotoCallback>> take_photo_callbacks_;
const VideoCaptureDeviceDescriptor device_descriptor_;
VideoCaptureFormat capture_format_;
...