Commit b3edc847 authored by braveyao, committed by Commit bot

Android video capture: use new libyuv::Android420ToI420 API for format conversion.

libyuv offers a new API, Android420ToI420, to convert an
Android YUV_420_888 frame (which may have interleaved UV planes)
into a normal I420 frame.
This can cut the de-interleaving time on Android by up to
dozens of milliseconds, noticeably reducing the overall
end-to-end video delay.

BUG=629342

Review-Url: https://codereview.chromium.org/2156003006
Cr-Commit-Position: refs/heads/master@{#407183}
parent e82c4e1b
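
For reference, the new entry point takes the source UV pixel stride alongside the usual plane pointers and row strides. The sketch below mirrors the parameter order of the calls in this change; the helper name and the contiguous-destination layout are illustrative, not part of libyuv or Chromium:

```cpp
#include <stdint.h>
#include "libyuv.h"  // third_party/libyuv/include/libyuv.h in Chromium

// Convert an Android YUV_420_888 frame into a caller-provided contiguous
// I420 buffer (full-size Y plane, then quarter-size U and V planes).
// uv_pixel_stride is 1 for planar chroma and 2 for the common interleaved
// (semi-planar) layout. Assumes even width/height.
void CameraFrameToI420(const uint8_t* y_src, int y_stride,
                       const uint8_t* u_src, const uint8_t* v_src,
                       int uv_row_stride, int uv_pixel_stride,
                       uint8_t* i420, int width, int height) {
  uint8_t* dst_y = i420;
  uint8_t* dst_u = dst_y + width * height;
  uint8_t* dst_v = dst_u + (width / 2) * (height / 2);
  libyuv::Android420ToI420(y_src, y_stride, u_src, uv_row_stride, v_src,
                           uv_row_stride, uv_pixel_stride, dst_y, width,
                           dst_u, width / 2, dst_v, width / 2, width, height);
}
```
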
@@ -8,8 +8,6 @@
#include "base/android/jni_android.h"
#include "base/android/scoped_java_ref.h"
#include "jni/ScreenCapture_jni.h"
#include "media/base/video_capture_types.h"
#include "media/base/yuv_convert.h"
#include "media/capture/content/video_capture_oracle.h"
#include "third_party/libyuv/include/libyuv.h"
@@ -60,10 +58,10 @@ void ScreenCaptureMachineAndroid::OnRGBAFrameAvailable(JNIEnv* env,
DCHECK(frame->format() == PIXEL_FORMAT_I420 ||
frame->format() == PIXEL_FORMAT_YV12);
scoped_refptr<VideoFrame> unscaled_frame = frame;
scoped_refptr<VideoFrame> temp_frame = frame;
if (frame->visible_rect().width() != width ||
frame->visible_rect().height() != height) {
unscaled_frame = VideoFrame::CreateFrame(
temp_frame = VideoFrame::CreateFrame(
PIXEL_FORMAT_I420, gfx::Size(width, height), gfx::Rect(width, height),
gfx::Size(width, height), base::TimeDelta());
}
@@ -74,33 +72,31 @@ void ScreenCaptureMachineAndroid::OnRGBAFrameAvailable(JNIEnv* env,
const int offset = top * row_stride + left * 4;
// ABGR little endian (rgba in memory) to I420.
libyuv::ABGRToI420(src + offset, row_stride,
unscaled_frame->visible_data(VideoFrame::kYPlane),
unscaled_frame->stride(VideoFrame::kYPlane),
unscaled_frame->visible_data(VideoFrame::kUPlane),
unscaled_frame->stride(VideoFrame::kUPlane),
unscaled_frame->visible_data(VideoFrame::kVPlane),
unscaled_frame->stride(VideoFrame::kVPlane),
unscaled_frame->visible_rect().width(),
unscaled_frame->visible_rect().height());
if (unscaled_frame != frame) {
libyuv::I420Scale(unscaled_frame->visible_data(VideoFrame::kYPlane),
unscaled_frame->stride(VideoFrame::kYPlane),
unscaled_frame->visible_data(VideoFrame::kUPlane),
unscaled_frame->stride(VideoFrame::kUPlane),
unscaled_frame->visible_data(VideoFrame::kVPlane),
unscaled_frame->stride(VideoFrame::kVPlane),
unscaled_frame->visible_rect().width(),
unscaled_frame->visible_rect().height(),
frame->visible_data(VideoFrame::kYPlane),
frame->stride(VideoFrame::kYPlane),
frame->visible_data(VideoFrame::kUPlane),
frame->stride(VideoFrame::kUPlane),
frame->visible_data(VideoFrame::kVPlane),
frame->stride(VideoFrame::kVPlane),
frame->visible_rect().width(),
frame->visible_rect().height(), libyuv::kFilterBilinear);
libyuv::ABGRToI420(
src + offset, row_stride, temp_frame->visible_data(VideoFrame::kYPlane),
temp_frame->stride(VideoFrame::kYPlane),
temp_frame->visible_data(VideoFrame::kUPlane),
temp_frame->stride(VideoFrame::kUPlane),
temp_frame->visible_data(VideoFrame::kVPlane),
temp_frame->stride(VideoFrame::kVPlane),
temp_frame->visible_rect().width(), temp_frame->visible_rect().height());
if (temp_frame != frame) {
libyuv::I420Scale(
temp_frame->visible_data(VideoFrame::kYPlane),
temp_frame->stride(VideoFrame::kYPlane),
temp_frame->visible_data(VideoFrame::kUPlane),
temp_frame->stride(VideoFrame::kUPlane),
temp_frame->visible_data(VideoFrame::kVPlane),
temp_frame->stride(VideoFrame::kVPlane),
temp_frame->visible_rect().width(), temp_frame->visible_rect().height(),
frame->visible_data(VideoFrame::kYPlane),
frame->stride(VideoFrame::kYPlane),
frame->visible_data(VideoFrame::kUPlane),
frame->stride(VideoFrame::kUPlane),
frame->visible_data(VideoFrame::kVPlane),
frame->stride(VideoFrame::kVPlane), frame->visible_rect().width(),
frame->visible_rect().height(), libyuv::kFilterBilinear);
}
capture_frame_cb.Run(frame, start_time, true);
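
The pattern in this hunk is: convert at the source size first, then scale only when the source and output sizes differ; when they match, the temporary frame simply aliases the output and the scale step is skipped. A self-contained sketch of the same flow with plain buffers (the function name and contiguous layout are illustrative, not the Chromium code; even dimensions assumed):

```cpp
#include <stdint.h>
#include <vector>
#include "libyuv.h"

// ABGR (rgba byte order in memory) -> I420, scaling only if needed.
void AbgrToI420Scaled(const uint8_t* src_abgr, int src_stride,
                      int src_w, int src_h,
                      uint8_t* dst_y, int dst_stride_y,
                      uint8_t* dst_u, int dst_stride_u,
                      uint8_t* dst_v, int dst_stride_v,
                      int dst_w, int dst_h) {
  if (src_w == dst_w && src_h == dst_h) {
    // Sizes match: convert straight into the destination planes.
    libyuv::ABGRToI420(src_abgr, src_stride, dst_y, dst_stride_y,
                       dst_u, dst_stride_u, dst_v, dst_stride_v, src_w, src_h);
    return;
  }
  // Otherwise convert into a temporary I420 frame at the source size...
  std::vector<uint8_t> tmp(src_w * src_h * 3 / 2);
  uint8_t* tmp_y = tmp.data();
  uint8_t* tmp_u = tmp_y + src_w * src_h;
  uint8_t* tmp_v = tmp_u + (src_w / 2) * (src_h / 2);
  libyuv::ABGRToI420(src_abgr, src_stride, tmp_y, src_w,
                     tmp_u, src_w / 2, tmp_v, src_w / 2, src_w, src_h);
  // ...then scale it into the destination.
  libyuv::I420Scale(tmp_y, src_w, tmp_u, src_w / 2, tmp_v, src_w / 2,
                    src_w, src_h, dst_y, dst_stride_y, dst_u, dst_stride_u,
                    dst_v, dst_stride_v, dst_w, dst_h, libyuv::kFilterBilinear);
}
```
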
@@ -137,6 +133,14 @@ void ScreenCaptureMachineAndroid::OnI420FrameAvailable(JNIEnv* env,
DCHECK(frame->format() == PIXEL_FORMAT_I420 ||
frame->format() == PIXEL_FORMAT_YV12);
scoped_refptr<VideoFrame> temp_frame = frame;
if (frame->visible_rect().width() != width ||
frame->visible_rect().height() != height) {
temp_frame = VideoFrame::CreateFrame(
PIXEL_FORMAT_I420, gfx::Size(width, height), gfx::Rect(width, height),
gfx::Size(width, height), base::TimeDelta());
}
uint8_t* const y_src =
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(y_buffer));
CHECK(y_src);
@@ -147,43 +151,36 @@ void ScreenCaptureMachineAndroid::OnI420FrameAvailable(JNIEnv* env,
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(v_buffer));
CHECK(v_src);
// De-interleave the U and V planes into temporary buffers, if needed.
int uv_stride = uv_row_stride;
std::unique_ptr<uint8_t[]> u_tmp, v_tmp;
if (uv_pixel_stride != 1) {
// U and V planes are actually interleaved, unpack them here.
// TODO(braveyao): According to
// https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888,
// how the U and V planes are interleaved is not guaranteed, so there is no
// existing libyuv function suitable for such a job. Filed a request at
// https://bugs.chromium.org/p/libyuv/issues/detail?id=604. Switch to the new
// function when it's available.
const int uv_plane_len = (int)env->GetDirectBufferCapacity(u_buffer);
u_tmp.reset(new uint8_t[(uv_plane_len + 1) / uv_pixel_stride]);
v_tmp.reset(new uint8_t[(uv_plane_len + 1) / uv_pixel_stride]);
for (int index = 0; index * uv_pixel_stride <= uv_plane_len; index++) {
u_tmp[index] = u_src[index * uv_pixel_stride];
v_tmp[index] = v_src[index * uv_pixel_stride];
}
u_src = u_tmp.get();
v_src = v_tmp.get();
uv_stride /= uv_pixel_stride;
}
const int y_offset = top * y_stride + left;
const int uv_offset = (top / 2) * uv_stride + left / 2;
// Note: If source width/height are same as the frame's width/height, the
// following will, internally, just perform a copy without scaling.
libyuv::I420Scale(y_src + y_offset, y_stride, u_src + uv_offset, uv_stride,
v_src + uv_offset, uv_stride, width, height,
frame->visible_data(VideoFrame::kYPlane),
frame->stride(VideoFrame::kYPlane),
frame->visible_data(VideoFrame::kUPlane),
frame->stride(VideoFrame::kUPlane),
frame->visible_data(VideoFrame::kVPlane),
frame->stride(VideoFrame::kVPlane),
frame->visible_rect().width(),
frame->visible_rect().height(), libyuv::kFilterBilinear);
const int uv_offset = (top / 2) * uv_row_stride + left / 2;
libyuv::Android420ToI420(
y_src + y_offset, y_stride, u_src + uv_offset, uv_row_stride,
v_src + uv_offset, uv_row_stride, uv_pixel_stride,
temp_frame->visible_data(VideoFrame::kYPlane),
temp_frame->stride(VideoFrame::kYPlane),
temp_frame->visible_data(VideoFrame::kUPlane),
temp_frame->stride(VideoFrame::kUPlane),
temp_frame->visible_data(VideoFrame::kVPlane),
temp_frame->stride(VideoFrame::kVPlane),
temp_frame->visible_rect().width(), temp_frame->visible_rect().height());
if (temp_frame != frame) {
libyuv::I420Scale(
temp_frame->visible_data(VideoFrame::kYPlane),
temp_frame->stride(VideoFrame::kYPlane),
temp_frame->visible_data(VideoFrame::kUPlane),
temp_frame->stride(VideoFrame::kUPlane),
temp_frame->visible_data(VideoFrame::kVPlane),
temp_frame->stride(VideoFrame::kVPlane),
temp_frame->visible_rect().width(), temp_frame->visible_rect().height(),
frame->visible_data(VideoFrame::kYPlane),
frame->stride(VideoFrame::kYPlane),
frame->visible_data(VideoFrame::kUPlane),
frame->stride(VideoFrame::kUPlane),
frame->visible_data(VideoFrame::kVPlane),
frame->stride(VideoFrame::kVPlane), frame->visible_rect().width(),
frame->visible_rect().height(), libyuv::kFilterBilinear);
}
capture_frame_cb.Run(frame, start_time, true);
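
The deleted block above is the cost this change removes: whenever uv_pixel_stride != 1, every U and V sample was gathered one byte at a time in C++ before libyuv could be called. A minimal sketch of that per-plane gather (illustrative, not the exact removed code); Android420ToI420 performs the equivalent internally and can take a faster, vectorized path for the common semi-planar layout:

```cpp
#include <stdint.h>

// Gather every pixel_stride-th byte of an interleaved chroma plane into a
// packed buffer. On the old path this ran once per chroma plane, per frame.
void UnpackChromaPlane(const uint8_t* src, int pixel_stride,
                       uint8_t* dst, int sample_count) {
  for (int i = 0; i < sample_count; ++i)
    dst[i] = src[i * pixel_stride];
}
```
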
@@ -37,7 +37,7 @@ class ScreenCaptureMachineAndroid : public media::VideoCaptureMachine {
void OnI420FrameAvailable(JNIEnv* env,
jobject obj,
jobject y_buffer,
jint y_stride,
jobject u_buffer,
jobject v_buffer,
jint uv_row_stride,
@@ -28,6 +28,7 @@ source_set("android") {
deps = [
":capture_jni_headers",
"//media/mojo/interfaces:image_capture",
"//third_party/libyuv",
]
}
@@ -12,6 +12,8 @@ import android.view.WindowManager;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import java.nio.ByteBuffer;
/**
* Video Capture Device base class, defines a set of methods that native code
* needs to use to configure, start capture, and to be reached by callbacks and
@@ -122,6 +124,10 @@ public abstract class VideoCapture {
public native void nativeOnFrameAvailable(
long nativeVideoCaptureDeviceAndroid, byte[] data, int length, int rotation);
public native void nativeOnI420FrameAvailable(long nativeVideoCaptureDeviceAndroid,
ByteBuffer yBuffer, int yStride, ByteBuffer uBuffer, ByteBuffer vBuffer,
int uvRowStride, int uvPixelStride, int width, int height, int rotation);
// Method for VideoCapture implementations to signal an asynchronous error.
public native void nativeOnError(long nativeVideoCaptureDeviceAndroid, String message);
@@ -127,9 +127,11 @@ public class VideoCaptureCamera2 extends VideoCapture {
throw new IllegalStateException();
}
readImageIntoBuffer(image, mCapturedData);
nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid, mCapturedData,
mCapturedData.length, getCameraRotation());
nativeOnI420FrameAvailable(mNativeVideoCaptureDeviceAndroid,
image.getPlanes()[0].getBuffer(), image.getPlanes()[0].getRowStride(),
image.getPlanes()[1].getBuffer(), image.getPlanes()[2].getBuffer(),
image.getPlanes()[1].getRowStride(), image.getPlanes()[1].getPixelStride(),
image.getWidth(), image.getHeight(), getCameraRotation());
} catch (IllegalStateException ex) {
Log.e(TAG, "acquireLatestImage():", ex);
}
@@ -225,8 +227,6 @@ public class VideoCaptureCamera2 extends VideoCapture {
private final Object mCameraStateLock = new Object();
private byte[] mCapturedData;
private CameraDevice mCameraDevice;
private CameraCaptureSession mPreviewSession;
private CaptureRequest mPreviewRequest;
@@ -312,47 +312,6 @@ public class VideoCaptureCamera2 extends VideoCapture {
return true;
}
private static void readImageIntoBuffer(Image image, byte[] data) {
final int imageWidth = image.getWidth();
final int imageHeight = image.getHeight();
final Image.Plane[] planes = image.getPlanes();
int offset = 0;
for (int plane = 0; plane < planes.length; ++plane) {
final ByteBuffer buffer = planes[plane].getBuffer();
final int rowStride = planes[plane].getRowStride();
// Experimentally, U and V planes have |pixelStride| = 2, which
// essentially means they are packed. That's silly, because we are
// forced to unpack here.
final int pixelStride = planes[plane].getPixelStride();
final int planeWidth = (plane == 0) ? imageWidth : imageWidth / 2;
final int planeHeight = (plane == 0) ? imageHeight : imageHeight / 2;
if (pixelStride == 1 && rowStride == planeWidth) {
// Copy whole plane from buffer into |data| at once.
buffer.get(data, offset, planeWidth * planeHeight);
offset += planeWidth * planeHeight;
} else {
// Copy pixels one by one respecting pixelStride and rowStride.
byte[] rowData = new byte[rowStride];
for (int row = 0; row < planeHeight - 1; ++row) {
buffer.get(rowData, 0, rowStride);
for (int col = 0; col < planeWidth; ++col) {
data[offset++] = rowData[col * pixelStride];
}
}
// Last row is special in some devices and may not contain the full
// |rowStride| bytes of data. See http://crbug.com/458701 and
// http://developer.android.com/reference/android/media/Image.Plane.html#getBuffer()
buffer.get(rowData, 0, Math.min(rowStride, buffer.remaining()));
for (int col = 0; col < planeWidth; ++col) {
data[offset++] = rowData[col * pixelStride];
}
}
}
}
private void changeCameraStateAndNotify(CameraState state) {
synchronized (mCameraStateLock) {
mCameraState = state;
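
One subtlety the deleted Java loop handled survives in any row-wise plane reader: per Image.Plane.getBuffer(), the final row of a plane may be shorter than rowStride because the buffer can end right after the last pixel (see http://crbug.com/458701). A hedged C++ sketch of a plane copy that clamps the last read; the function name and packed destination layout are illustrative:

```cpp
#include <stddef.h>
#include <stdint.h>

// Copy one YUV plane row by row, honoring row_stride and pixel_stride.
// The source buffer may end right after the last pixel, so reads on the
// final row are bounds-checked instead of assuming a full row_stride.
void ReadPlane(const uint8_t* src, size_t src_len, int row_stride,
               int pixel_stride, uint8_t* dst, int width, int height) {
  for (int row = 0; row < height; ++row) {
    const size_t row_begin = static_cast<size_t>(row) * row_stride;
    const size_t avail = src_len - row_begin;  // may be short on last row
    for (int col = 0; col < width; ++col) {
      const size_t off = static_cast<size_t>(col) * pixel_stride;
      if (off < avail)
        dst[row * width + col] = src[row_begin + off];
    }
  }
}
```
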
@@ -497,9 +456,6 @@ public class VideoCaptureCamera2 extends VideoCapture {
// |mCaptureFormat| is also used to configure the ImageReader.
mCaptureFormat = new VideoCaptureFormat(closestSupportedSize.getWidth(),
closestSupportedSize.getHeight(), frameRate, ImageFormat.YUV_420_888);
int expectedFrameSize = mCaptureFormat.mWidth * mCaptureFormat.mHeight
* ImageFormat.getBitsPerPixel(mCaptureFormat.mPixelFormat) / 8;
mCapturedData = new byte[expectedFrameSize];
mCameraNativeOrientation =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// TODO(mcasas): The following line is correct for N5 with prerelease Build,
@@ -14,6 +14,7 @@
#include "jni/VideoCapture_jni.h"
#include "media/capture/video/android/photo_capabilities.h"
#include "media/capture/video/android/video_capture_device_factory_android.h"
#include "third_party/libyuv/include/libyuv.h"
using base::android::AttachCurrentThread;
using base::android::CheckException;
@@ -208,7 +209,7 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable(
return;
}
base::TimeTicks current_time = base::TimeTicks::Now();
const base::TimeTicks current_time = base::TimeTicks::Now();
if (!got_first_frame_) {
// Set aside one frame allowance for fluctuation.
expected_next_frame_time_ = current_time - frame_interval_;
@@ -230,6 +231,58 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable(
env->ReleaseByteArrayElements(data, buffer, JNI_ABORT);
}
void VideoCaptureDeviceAndroid::OnI420FrameAvailable(JNIEnv* env,
jobject obj,
jobject y_buffer,
jint y_stride,
jobject u_buffer,
jobject v_buffer,
jint uv_row_stride,
jint uv_pixel_stride,
jint width,
jint height,
jint rotation) {
const base::TimeTicks current_time = base::TimeTicks::Now();
if (!got_first_frame_) {
// Set aside one frame allowance for fluctuation.
expected_next_frame_time_ = current_time - frame_interval_;
first_ref_time_ = current_time;
got_first_frame_ = true;
}
uint8_t* const y_src =
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(y_buffer));
CHECK(y_src);
uint8_t* const u_src =
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(u_buffer));
CHECK(u_src);
uint8_t* const v_src =
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(v_buffer));
CHECK(v_src);
const int y_plane_length = width * height;
const int uv_plane_length = y_plane_length / 4;
const int buffer_length = y_plane_length + uv_plane_length * 2;
std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_length]);
libyuv::Android420ToI420(y_src, y_stride, u_src, uv_row_stride, v_src,
uv_row_stride, uv_pixel_stride, buffer.get(), width,
buffer.get() + y_plane_length, width / 2,
buffer.get() + y_plane_length + uv_plane_length,
width / 2, width, height);
// Deliver the frame when it doesn't arrive too early.
if (expected_next_frame_time_ <= current_time) {
expected_next_frame_time_ += frame_interval_;
// TODO(qiangchen): Investigate how to get raw timestamp for Android,
// rather than using reference time to calculate timestamp.
client_->OnIncomingCapturedData(buffer.get(), buffer_length,
capture_format_, rotation, current_time,
current_time - first_ref_time_);
}
}
void VideoCaptureDeviceAndroid::OnError(JNIEnv* env,
const JavaParamRef<jobject>& obj,
const JavaParamRef<jstring>& message) {
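
The sizing arithmetic above is the standard I420 footprint: one full-resolution Y plane plus two quarter-resolution chroma planes, i.e. width * height * 3 / 2 bytes for even dimensions. A worked example (values are illustrative):

```cpp
// I420 footprint for a 640x480 frame (even dimensions assumed):
//   Y: 640 * 480 = 307200 bytes
//   U: 320 * 240 =  76800 bytes
//   V: 320 * 240 =  76800 bytes
//   total          460800 bytes == 640 * 480 * 3 / 2
constexpr int kWidth = 640, kHeight = 480;
constexpr int kYPlaneLength = kWidth * kHeight;
constexpr int kUVPlaneLength = kYPlaneLength / 4;
constexpr int kBufferLength = kYPlaneLength + 2 * kUVPlaneLength;
static_assert(kBufferLength == kWidth * kHeight * 3 / 2, "I420 is 12 bpp");
```

The early-frame check that follows the conversion then paces delivery: frames arriving before expected_next_frame_time_ are dropped, and the deadline advances by frame_interval_ for each delivered frame.
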
@@ -68,6 +68,19 @@ class CAPTURE_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
jint length,
jint rotation);
// Implement org.chromium.media.VideoCapture.nativeOnI420FrameAvailable.
void OnI420FrameAvailable(JNIEnv* env,
jobject obj,
jobject y_buffer,
jint y_stride,
jobject u_buffer,
jobject v_buffer,
jint uv_row_stride,
jint uv_pixel_stride,
jint width,
jint height,
jint rotation);
// Implement org.chromium.media.VideoCapture.nativeOnError.
void OnError(JNIEnv* env,
const base::android::JavaParamRef<jobject>& obj,