Commit d851a930 authored by aleksandar.stojiljkovic's avatar aleksandar.stojiljkovic Committed by Commit bot

Lossless access to 16-bit video stream using WebGL GL_FLOAT texture.

If using canvas.getImageData or GL textures of UNSIGNED_BYTE type, 16-bit depth
stream data is available only through an 8-bit* API, so there is a precision
loss. Here, we add lossless** JavaScript access through a WebGL float texture.

Using RGBA32F here enables lossless access to 16-bit depth information via
WebGL1. In related work, the same code path is used to upload 16-bit data to
other WebGL2-supported formats; e.g. with GL_R16UI, no conversion is needed in
SkCanvasVideoRenderer::TexImageImpl.

* 8-bit access refers to JS ImageData and WebGL UNSIGNED_BYTE, where 16-bit
depth data is currently exposed as luminance (all three color channels contain
the upper 8 bits of the 16-bit value).

** Float is used for lossless access to 16-bit data, normalized to the
[0, 1.0] range using the formula value_float = value_16bit / 65535.0.
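
For illustration, a minimal page-side sketch of the new path (hypothetical
usage, not part of this patch; assumes a playing <video> depth track and the
WebGL1 OES_texture_float extension):

  var gl = canvas.getContext('webgl');
  gl.getExtension('OES_texture_float');  // enables FLOAT uploads in WebGL1
  var tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  // Y16 frames take the lossless conversion path added here: each 16-bit
  // value is uploaded as value_16bit / 65535.0 in R, G and B (A = 1.0).
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.FLOAT, video);
  // A shader can then recover the original value as texel.r * 65535.0.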

This patch also adds a WebGL UNSIGNED_BYTE test. That path was previously
covered through testVideoToImageBitmap, since UNSIGNED_BYTE and canvas
rendering still share the same path through SkCanvasVideoRenderer::Paint.

BUG=369849, 624436
CQ_INCLUDE_TRYBOTS=master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel

Review-Url: https://codereview.chromium.org/2476693002
Cr-Commit-Position: refs/heads/master@{#436221}
parent 821cd036
@@ -314,6 +314,11 @@ group("telemetry_gpu_integration_test") {
    "//content/test/gpu/",
    "//content/test/data/gpu/",
# For depth_capture
"//content/test/data/media/depth_stream_test_utilities.js",
"//content/test/data/media/getusermedia-depth-capture.html",
"//content/test/data/media/webrtc_test_utilities.js",
    # For GpuProcess.video
    "//content/test/data/media/bear.ogv",
...
@@ -610,6 +610,41 @@ bool WebMediaPlayerMS::copyVideoTextureToPlatformTexture(
      premultiply_alpha, flip_y);
}
bool WebMediaPlayerMS::texImageImpl(TexImageFunctionID functionID,
unsigned target,
gpu::gles2::GLES2Interface* gl,
int level,
int internalformat,
unsigned format,
unsigned type,
int xoffset,
int yoffset,
int zoffset,
bool flip_y,
bool premultiply_alpha) {
TRACE_EVENT0("media", "WebMediaPlayerMS:texImageImpl");
DCHECK(thread_checker_.CalledOnValidThread());
const scoped_refptr<media::VideoFrame> video_frame =
compositor_->GetCurrentFrameWithoutUpdatingStatistics();
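  // Only CPU-mapped Y16 frames take this optimized upload path; for other
  // frames we return false and the caller falls back to the generic path.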
if (!video_frame || !video_frame->IsMappable() ||
video_frame->HasTextures() ||
video_frame->format() != media::PIXEL_FORMAT_Y16) {
return false;
}
if (functionID == TexImage2D) {
return media::SkCanvasVideoRenderer::TexImage2D(
target, gl, video_frame.get(), level, internalformat, format, type,
flip_y, premultiply_alpha);
} else if (functionID == TexSubImage2D) {
return media::SkCanvasVideoRenderer::TexSubImage2D(
target, gl, video_frame.get(), level, format, type, xoffset, yoffset,
flip_y, premultiply_alpha);
}
return false;
}
void WebMediaPlayerMS::OnFirstFrameReceived(media::VideoRotation video_rotation,
                                            bool is_opaque) {
  DVLOG(1) << __func__;
...
@@ -156,6 +156,19 @@ class CONTENT_EXPORT WebMediaPlayerMS
                    bool premultiply_alpha,
                    bool flip_y) override;
bool texImageImpl(TexImageFunctionID functionID,
unsigned target,
gpu::gles2::GLES2Interface* gl,
int level,
int internalformat,
unsigned format,
unsigned type,
int xoffset,
int yoffset,
int zoffset,
bool flip_y,
bool premultiply_alpha) override;
 private:
  friend class WebMediaPlayerMSTest;
...
@@ -32,80 +32,44 @@ function getFake16bitStream() {
  });
}
-function testVideoToImageBitmap(videoElementName, success, error)
-{
-  var bitmaps = {};
-  var video = $(videoElementName);
-  var canvas = document.createElement('canvas');
-  canvas.width = 96;
-  canvas.height = 96;
-  document.body.appendChild(canvas);
-  var p1 = createImageBitmap(video).then(function(imageBitmap) {
-    return runImageBitmapTest(imageBitmap, canvas, false); });
-  var p2 = createImageBitmap(video,
-      {imageOrientation: "none", premultiplyAlpha: "premultiply"}).then(
-      function(imageBitmap) {
-        return runImageBitmapTest(imageBitmap, canvas, false); });
-  var p3 = createImageBitmap(video,
-      {imageOrientation: "none", premultiplyAlpha: "default"}).then(
-      function(imageBitmap) {
-        return runImageBitmapTest(imageBitmap, canvas, false); });
-  var p4 = createImageBitmap(video,
-      {imageOrientation: "none", premultiplyAlpha: "none"}).then(
-      function(imageBitmap) {
-        return runImageBitmapTest(imageBitmap, canvas, false); });
-  var p5 = createImageBitmap(video,
-      {imageOrientation: "flipY", premultiplyAlpha: "premultiply"}).then(
-      function(imageBitmap) {
-        return runImageBitmapTest(imageBitmap, canvas, true); });
-  var p6 = createImageBitmap(video,
-      {imageOrientation: "flipY", premultiplyAlpha: "default"}).then(
-      function(imageBitmap) {
-        return runImageBitmapTest(imageBitmap, canvas, true); });
-  var p7 = createImageBitmap(video,
-      {imageOrientation: "flipY", premultiplyAlpha: "none"}).then(
-      function(imageBitmap) {
-        return runImageBitmapTest(imageBitmap, canvas, true); });
-  return Promise.all([p1, p2, p3, p4, p5, p6, p7]).then(success(), reason => {
-    return error({name: reason});
-  });
-}
-function runImageBitmapTest(bitmap, canvas, flipped) {
-  var context = canvas.getContext('2d');
-  context.drawImage(bitmap,0,0);
-  var imageData = context.getImageData(0, 0, canvas.width, canvas.height);
-  // Fake capture device 96x96 depth image is gradient. See also
-  // Draw16BitGradient in fake_video_capture_device.cc.
-  var color_step = 255.0 / (canvas.width + canvas.height);
-  var rowsColumnsToCheck = [[1, 1],
-                            [0, canvas.width - 1],
-                            [canvas.height - 1, 0],
-                            [canvas.height - 1, canvas.width - 1],
-                            [canvas.height - 3, canvas.width - 4]];
-  // Same value is expected for all color components.
-  if (imageData.data[0] != imageData.data[1] ||
-      imageData.data[0] != imageData.data[2])
-    return Promise.reject("Values " + imageData.data[0] + ", " +
-        imageData.data[1] + ", " + imageData.data[2] + " differ at top left");
-  // Calculate all reference points based on top left and compare.
-  for (var j = 0; j < rowsColumnsToCheck.length; ++j) {
-    var row = rowsColumnsToCheck[j][0];
-    var column = rowsColumnsToCheck[j][1];
-    var i = (canvas.width * row + column) * 4;
-    if (imageData.data[i] != imageData.data[i + 1] ||
-        imageData.data[i] != imageData.data[i + 2])
-      return Promise.reject("Values " + imageData.data[i] + ", " +
-          imageData.data[i + 1] + ", " + imageData.data[i + 2] +
-          " differ at index " + i);
-    var calculated = (imageData.data[0] +
-        color_step * ((flipped ? -row : row) + column)) % 255;
-    if (Math.abs(calculated - imageData.data[i]) > 2)
-      return Promise.reject("Reference value " + imageData.data[i] +
-          " differs from calculated: " + calculated + " at index " + i +
-          ". TopLeft value:" + imageData.data[0]);
-  }
-  return true;
-}
+// |data| is an RGBA pixel array and may be backed by different types:
+// e.g. Uint8Array, Uint8ClampedArray, Float32Array...
+// The value at point (row, column) is calculated as
+// (top_left_value + (row + column) * step) % wrap_around, where wrap_around
+// is 255 (for Uint8) or 1.0 for float. See FakeVideoCaptureDevice for details.
+function verifyPixels(
+    data, width, height, flip_y, step, wrap_around, tolerance, test_name) {
+  var rowsColumnsToCheck = [[1, 1],
+                            [0, width - 1],
+                            [height - 1, 0],
+                            [height - 1, width - 1],
+                            [height - 3, width - 3]];
+
+  // Same value is expected for all color components.
+  if (data[0] != data[1] || data[0] != data[2]) {
+    return Promise.reject(test_name + ": values " + data[0] + ", " + data[1] +
+        ", " + data[2] + " differ at top left.");
+  }
+
+  // Calculate all reference points based on top left and compare.
+  for (var j = 0; j < rowsColumnsToCheck.length; ++j) {
+    var row = rowsColumnsToCheck[j][0];
+    var column = rowsColumnsToCheck[j][1];
+    var i = (width * row + column) * 4;
+    if (data[i] != data[i + 1] || data[i] != data[i + 2]) {
+      return Promise.reject(test_name + ": values " + data[i] + ", " +
+          data[i + 1] + ", " + data[i + 2] + " differ at index " + i);
+    }
+    var calculated = (data[0] +
+        step * ((flip_y ? -row : row) + column)) % wrap_around;
+    if (Math.abs(calculated - data[i]) > tolerance) {
+      return Promise.reject(test_name + ": reference value " + data[i] +
+          " differs from calculated: " + calculated +
+          " at index (row, column) " + i + " (" + row + ", " + column +
+          "). TopLeft value:" + data[0] + ", step:" + step + ", flip_y:" +
+          flip_y);
+    }
+  }
+  return true;
+}
\ No newline at end of file
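
For context, a hypothetical caller of verifyPixels (the real callers live in
getusermedia-depth-capture.html; the constants below are illustrative and
mirror the fake capture device's 96x96 gradient):

  var imageData = context.getImageData(0, 0, 96, 96);
  var color_step = 255.0 / (96 + 96);  // See Draw16BitGradient.
  verifyPixels(imageData.data, 96, 96, false /*flip_y*/, color_step,
               255 /*wrap_around*/, 2 /*tolerance*/, 'testCanvasReadback');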
@@ -1115,6 +1115,13 @@ TELEMETRY_GPU_INTEGRATION_TESTS = {
      },
    ]
  },
'depth_capture': {
'tester_configs': [
{
'allow_on_android': True,
},
]
},
  'pixel_test': {
    'target_name': 'pixel',
    'args': [
...
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from gpu_tests.gpu_test_expectations import GpuTestExpectations
# See the GpuTestExpectations class for documentation.
class DepthCaptureExpectations(GpuTestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('DepthCapture_depthStreamToRGBAFloatTexture',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
return
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from gpu_tests import gpu_integration_test
from gpu_tests import depth_capture_expectations
from gpu_tests import path_util
data_path = os.path.join(
path_util.GetChromiumSrcDir(), 'content', 'test', 'data', 'media')
wait_timeout = 60 # seconds
harness_script = r"""
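// Minimal stand-in for the real domAutomationController: the test page is
// expected to call send("OK") on success or send(<error message>) on failure;
// RunActualGpuTest below polls _finished and then checks _succeeded.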
var domAutomationController = {};
domAutomationController._succeeded = false;
domAutomationController._finished = false;
domAutomationController._error_msg = "";
domAutomationController.setAutomationId = function(id) {}
domAutomationController.send = function(msg) {
if (msg == "OK") {
if (!domAutomationController._finished) {
domAutomationController._succeeded = true;
}
domAutomationController._finished = true;
} else {
domAutomationController._succeeded = false;
domAutomationController._finished = true;
domAutomationController._error_msg = msg;
}
}
domAutomationController.reset = function() {
domAutomationController._succeeded = false;
domAutomationController._finished = false;
}
window.domAutomationController = domAutomationController;
console.log("Harness injected.");
"""
class DepthCaptureIntegrationTest(gpu_integration_test.GpuIntegrationTest):
@classmethod
def Name(cls):
return 'depth_capture'
@classmethod
def CustomizeOptions(cls):
options = cls._finder_options.browser_options
options.AppendExtraBrowserArgs(
'--disable-domain-blocking-for-3d-apis')
options.AppendExtraBrowserArgs(
'--use-fake-ui-for-media-stream')
options.AppendExtraBrowserArgs(
'--use-fake-device-for-media-stream=device-count=2')
# Required for about:gpucrash handling from Telemetry.
options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
@classmethod
def GenerateGpuTests(cls, options):
tests = (('DepthCapture_depthStreamToRGBAUint8Texture',
'getusermedia-depth-capture.html?query=RGBAUint8'),
('DepthCapture_depthStreamToRGBAFloatTexture',
'getusermedia-depth-capture.html?query=RGBAFloat'))
for t in tests:
yield (t[0], t[1], ('_' + t[0]))
def RunActualGpuTest(self, test_path, *args):
url = self.UrlOfStaticFilePath(test_path)
tab = self.tab
tab.Navigate(url, script_to_evaluate_on_commit=harness_script)
    tab.action_runner.WaitForJavaScriptCondition(
        'domAutomationController._finished', timeout_in_seconds=wait_timeout)
if not tab.EvaluateJavaScript('domAutomationController._succeeded'):
      self.fail('page indicated test failure: ' +
tab.EvaluateJavaScript('domAutomationController._error_msg'))
@classmethod
def _CreateExpectations(cls):
return depth_capture_expectations.DepthCaptureExpectations()
@classmethod
def setUpClass(cls):
    super(DepthCaptureIntegrationTest, cls).setUpClass()
cls.CustomizeOptions()
cls.SetBrowserOptions(cls._finder_options)
cls.StartBrowser()
cls.SetStaticServerDirs([data_path])
@@ -10,6 +10,7 @@
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/data_buffer.h"
#include "media/base/video_frame.h"
#include "media/base/yuv_convert.h"
#include "skia/ext/texture_handle.h"
@@ -522,28 +523,89 @@ scoped_refptr<VideoFrame> DownShiftHighbitVideoFrame(
  return ret;
}
-// We take the upper 8 bits of 16-bit data and convert it as luminance to ARGB.
-// We loose the precision here, but it is important not to render Y16 as RG_88.
-// To get the full precision use float textures with WebGL1 and e.g. R16UI or
-// R32F textures with WebGL2.
-void ConvertY16ToARGB(const VideoFrame* video_frame,
-                      void* argb_pixels,
-                      size_t argb_row_bytes) {
+// Converts 16-bit data to |out| buffer of the specified GL |type|.
+// When the |format| is RGBA, the converted value is fed as luminance.
+void FlipAndConvertY16(const VideoFrame* video_frame,
+                       uint8_t* out,
+                       unsigned format,
+                       unsigned type,
+                       bool flip_y,
+                       size_t output_row_bytes) {
   const uint8_t* row_head = video_frame->visible_data(0);
-  uint8_t* out = static_cast<uint8_t*>(argb_pixels);
   const size_t stride = video_frame->stride(0);
-  for (int i = 0; i < video_frame->visible_rect().height(); ++i) {
-    uint32_t* rgba = reinterpret_cast<uint32_t*>(out);
-    const uint8_t* row_end = row_head + video_frame->visible_rect().width() * 2;
-    for (const uint8_t* row = row_head; row < row_end; ++row) {
-      uint32_t gray_value = *++row;
-      *rgba++ = SkColorSetRGB(gray_value, gray_value, gray_value);
+  const int height = video_frame->visible_rect().height();
+  for (int i = 0; i < height; ++i) {
+    uint8_t* out_row_head = flip_y ? out + output_row_bytes * (height - i - 1)
+                                   : out + output_row_bytes * i;
+    const uint16_t* row = reinterpret_cast<const uint16_t*>(row_head);
+    const uint16_t* row_end = row + video_frame->visible_rect().width();
+    if (type == GL_FLOAT) {
+      DCHECK_EQ(static_cast<unsigned>(GL_RGBA), format);
+      float* out_row = reinterpret_cast<float*>(out_row_head);
+      while (row < row_end) {
+        float gray_value = *row++ / 65535.f;
+        *out_row++ = gray_value;
+        *out_row++ = gray_value;
+        *out_row++ = gray_value;
+        *out_row++ = 1.0f;
+      }
+    } else if (type == GL_UNSIGNED_BYTE) {
+      // We take the upper 8 bits of 16-bit data and convert it as luminance
+      // to ARGB. We lose the precision here, but it is important not to
+      // render Y16 as RG_88. To get the full precision use float textures
+      // with WebGL1 and e.g. R16UI or R32F textures with WebGL2.
+      DCHECK_EQ(static_cast<unsigned>(GL_RGBA), format);
+      uint32_t* rgba = reinterpret_cast<uint32_t*>(out_row_head);
+      while (row < row_end) {
+        uint32_t gray_value = *row++ >> 8;
+        *rgba++ = SkColorSetRGB(gray_value, gray_value, gray_value);
+      }
+    } else {
+      NOTREACHED() << "Unsupported Y16 conversion for format: 0x" << std::hex
+                   << format << " and type: 0x" << std::hex << type;
     }
-    out += argb_row_bytes;
     row_head += stride;
   }
 }
// Common functionality of SkCanvasVideoRenderer's TexImage2D and TexSubImage2D.
// Allocates a buffer required for conversion and converts |frame| content to
// desired |format|.
// Returns true if calling glTex(Sub)Image is supported for provided |frame|
// format and parameters.
bool TexImageHelper(VideoFrame* frame,
unsigned format,
unsigned type,
bool flip_y,
scoped_refptr<DataBuffer>* temp_buffer) {
unsigned output_bytes_per_pixel = 0;
switch (frame->format()) {
case PIXEL_FORMAT_Y16:
// Converting single component unsigned short here to FLOAT luminance.
switch (format) {
case GL_RGBA:
if (type == GL_FLOAT) {
output_bytes_per_pixel = 4 * sizeof(GLfloat);
break;
}
          // Fall through.
default:
return false;
}
break;
default:
return false;
}
size_t output_row_bytes =
frame->visible_rect().width() * output_bytes_per_pixel;
*temp_buffer =
new DataBuffer(output_row_bytes * frame->visible_rect().height());
FlipAndConvertY16(frame, (*temp_buffer)->writable_data(), format, type,
flip_y, output_row_bytes);
return true;
}
}  // anonymous namespace

// static
@@ -648,7 +710,10 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
     }
     case PIXEL_FORMAT_Y16:
-      ConvertY16ToARGB(video_frame, rgb_pixels, row_bytes);
+      // Since it is grayscale conversion, we disregard SK_PMCOLOR_BYTE_ORDER
+      // and always use GL_RGBA.
+      FlipAndConvertY16(video_frame, static_cast<uint8_t*>(rgb_pixels), GL_RGBA,
+                        GL_UNSIGNED_BYTE, false /*flip_y*/, row_bytes);
       break;

     case PIXEL_FORMAT_NV12:
@@ -772,6 +837,51 @@ bool SkCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
  return true;
}
bool SkCanvasVideoRenderer::TexImage2D(unsigned target,
gpu::gles2::GLES2Interface* gl,
VideoFrame* frame,
int level,
int internalformat,
unsigned format,
unsigned type,
bool flip_y,
bool premultiply_alpha) {
DCHECK(frame);
DCHECK(!frame->HasTextures());
scoped_refptr<DataBuffer> temp_buffer;
if (!TexImageHelper(frame, format, type, flip_y, &temp_buffer))
return false;
gl->TexImage2D(target, level, internalformat, frame->visible_rect().width(),
frame->visible_rect().height(), 0, format, type,
temp_buffer->data());
return true;
}
bool SkCanvasVideoRenderer::TexSubImage2D(unsigned target,
gpu::gles2::GLES2Interface* gl,
VideoFrame* frame,
int level,
unsigned format,
unsigned type,
int xoffset,
int yoffset,
bool flip_y,
bool premultiply_alpha) {
DCHECK(frame);
DCHECK(!frame->HasTextures());
scoped_refptr<DataBuffer> temp_buffer;
if (!TexImageHelper(frame, format, type, flip_y, &temp_buffer))
return false;
gl->TexSubImage2D(
target, level, xoffset, yoffset, frame->visible_rect().width(),
frame->visible_rect().height(), format, type, temp_buffer->data());
return true;
}
void SkCanvasVideoRenderer::ResetCache() {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Clear cached values.
...
@@ -90,6 +90,37 @@ class MEDIA_EXPORT SkCanvasVideoRenderer {
                    bool premultiply_alpha,
                    bool flip_y);
  // Converts unsigned 16-bit Y16 frame data to the target |format| and calls
  // WebGL texImage2D.
  // |level|, |internalformat|, |format| and |type| are WebGL texImage2D
  // parameters.
  // Returns false if there is no implementation for the given parameters.
static bool TexImage2D(unsigned target,
gpu::gles2::GLES2Interface* gl,
VideoFrame* video_frame,
int level,
int internalformat,
unsigned format,
unsigned type,
bool flip_y,
bool premultiply_alpha);
  // Converts unsigned 16-bit Y16 frame data to the target |format| and calls
  // WebGL texSubImage2D.
  // |level|, |format|, |type|, |xoffset| and |yoffset| are texSubImage2D
  // parameters.
  // Returns false if there is no implementation for the given parameters.
static bool TexSubImage2D(unsigned target,
gpu::gles2::GLES2Interface* gl,
VideoFrame* video_frame,
int level,
unsigned format,
unsigned type,
int xoffset,
int yoffset,
bool flip_y,
bool premultiply_alpha);
  // In general, we hold the most recently painted frame to increase the
  // performance for the case that the same frame needs to be painted
  // repeatedly. Call this function if you are sure the most recent frame will
...
@@ -50,6 +50,31 @@ SkColor GetColor(SkCanvas* canvas) {
  return GetColorAt(canvas, 0, 0);
}
// Generates frame pixels into the provided |external_memory| and wraps it as
// a video frame.
scoped_refptr<VideoFrame> CreateTestY16Frame(const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
void* external_memory,
base::TimeDelta timestamp) {
const int offset_x = visible_rect.x();
const int offset_y = visible_rect.y();
const int stride = coded_size.width();
const size_t byte_size = stride * coded_size.height() * 2;
// In the visible rect, fill upper byte with [0-255] and lower with [255-0].
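  // e.g. value 0 encodes as 0x00FF, value 1 as 0x01FE, value 255 as 0xFF00.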
uint16_t* data = static_cast<uint16_t*>(external_memory);
for (int j = 0; j < visible_rect.height(); j++) {
for (int i = 0; i < visible_rect.width(); i++) {
const int value = i + j * visible_rect.width();
data[(stride * (j + offset_y)) + i + offset_x] =
((value & 0xFF) << 8) | (~value & 0xFF);
}
}
return media::VideoFrame::WrapExternalData(
media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
static_cast<uint8_t*>(external_memory), byte_size, timestamp);
}
class SkCanvasVideoRendererTest : public testing::Test {
 public:
  enum Color {
@@ -525,22 +550,10 @@ TEST_F(SkCanvasVideoRendererTest, Y16) {
   std::unique_ptr<unsigned char, base::AlignedFreeDeleter> memory(
       static_cast<unsigned char*>(base::AlignedAlloc(
           byte_size, media::VideoFrame::kFrameAddressAlignment)));
-  // In the visible rect, fill upper byte with [0-255] and lower with [255-0].
-  uint16_t* data = reinterpret_cast<uint16_t*>(memory.get());
-  for (int j = 0; j < bitmap.height(); j++) {
-    for (int i = 0; i < bitmap.width(); i++) {
-      const int value = i + j * bitmap.width();
-      data[(stride * (j + offset_y)) + i + offset_x] =
-          ((value & 0xFF) << 8) | (~value & 0xFF);
-    }
-  }
   const gfx::Rect rect(offset_x, offset_y, bitmap.width(), bitmap.height());
   scoped_refptr<media::VideoFrame> video_frame =
-      media::VideoFrame::WrapExternalData(
-          media::PIXEL_FORMAT_Y16,
-          gfx::Size(stride, offset_y + bitmap.height()), rect, rect.size(),
-          memory.get(), byte_size, cropped_frame()->timestamp());
+      CreateTestY16Frame(gfx::Size(stride, offset_y + bitmap.height()), rect,
+                         memory.get(), cropped_frame()->timestamp());
  SkCanvas canvas(bitmap);
  SkPaint paint;
...
@@ -564,6 +577,32 @@ class TestGLES2Interface : public gpu::gles2::GLES2InterfaceStub {
    DCHECK_EQ(1, n);
    *textures = 1;
  }
void TexImage2D(GLenum target,
GLint level,
GLint internalformat,
GLsizei width,
GLsizei height,
GLint border,
GLenum format,
GLenum type,
const void* pixels) override {
if (!teximage2d_callback_.is_null()) {
teximage2d_callback_.Run(target, level, internalformat, width, height,
border, format, type, pixels);
}
}
base::Callback<void(GLenum target,
GLint level,
GLint internalformat,
GLsizei width,
GLsizei height,
GLint border,
GLenum format,
GLenum type,
const void* pixels)>
teximage2d_callback_;
};

void MailboxHoldersReleased(const gpu::SyncToken& sync_token) {}

}  // namespace
@@ -635,4 +674,59 @@ TEST_F(SkCanvasVideoRendererTest, CorrectFrameSizeToVisibleRect) {
  EXPECT_EQ(fWidth / 2, renderer_.LastImageDimensionsForTesting().height());
}
TEST_F(SkCanvasVideoRendererTest, TexImageImplY16) {
// Create test frame.
// |offset_x| and |offset_y| define visible rect's offset to coded rect.
const int offset_x = 3;
const int offset_y = 5;
const int width = 16;
const int height = 16;
const int stride = width + offset_x;
const size_t byte_size = stride * (height + offset_y) * 2;
std::unique_ptr<unsigned char, base::AlignedFreeDeleter> memory(
static_cast<unsigned char*>(base::AlignedAlloc(
byte_size, media::VideoFrame::kFrameAddressAlignment)));
const gfx::Rect rect(offset_x, offset_y, width, height);
scoped_refptr<media::VideoFrame> video_frame =
CreateTestY16Frame(gfx::Size(stride, offset_y + height), rect,
memory.get(), cropped_frame()->timestamp());
// Create GL context.
sk_sp<const GrGLInterface> null_interface(GrGLCreateNullInterface());
sk_sp<GrContext> gr_context(GrContext::Create(
kOpenGL_GrBackend,
reinterpret_cast<GrBackendContext>(null_interface.get())));
TestGLES2Interface gles2;
Context3D context_3d(&gles2, gr_context.get());
// Bind the texImage2D callback to verify the uint16 to float32 conversion.
gles2.teximage2d_callback_ =
base::Bind([](GLenum target, GLint level, GLint internalformat,
GLsizei width, GLsizei height, GLint border, GLenum format,
GLenum type, const void* pixels) {
EXPECT_EQ(static_cast<unsigned>(GL_FLOAT), type);
EXPECT_EQ(static_cast<unsigned>(GL_RGBA), format);
EXPECT_EQ(GL_RGBA, internalformat);
EXPECT_EQ(0, border);
EXPECT_EQ(16, width);
EXPECT_EQ(16, height);
EXPECT_EQ(static_cast<unsigned>(GL_TEXTURE_2D), target);
const float* data = static_cast<const float*>(pixels);
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
const int value = i + (height - j - 1) * width; // flip_y is true.
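            // Mirrors the pattern written by CreateTestY16Frame above: e.g.
            // value 0 was stored as 0x00FF, so it reads back as 255.f/65535.f.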
float expected_value =
(((value & 0xFF) << 8) | (~value & 0xFF)) / 65535.f;
EXPECT_EQ(expected_value, data[(i + j * width) * 4]);
EXPECT_EQ(expected_value, data[(i + j * width) * 4 + 1]);
EXPECT_EQ(expected_value, data[(i + j * width) * 4 + 2]);
EXPECT_EQ(1.0f, data[(i + j * width) * 4 + 3]);
}
}
});
SkCanvasVideoRenderer::TexImage2D(GL_TEXTURE_2D, &gles2, video_frame.get(), 0,
GL_RGBA, GL_RGBA, GL_FLOAT, true /*flip_y*/,
                                    true /*premultiply_alpha*/);
}
}  // namespace media
...
@@ -83,6 +83,29 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:104a",
"os": "Linux"
}
]
}
},
    {
      "args": [
        "gpu_process",
@@ -381,6 +404,29 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:104a",
"os": "Linux"
}
]
}
},
    {
      "args": [
        "gpu_process",
@@ -663,6 +709,29 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "8086:0a2e",
"os": "Mac-10.10"
}
]
}
},
    {
      "args": [
        "gpu_process",
@@ -961,6 +1030,29 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "8086:0a2e",
"os": "Mac-10.10"
}
]
}
},
    {
      "args": [
        "gpu_process",
@@ -1247,6 +1339,30 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "1002:6821",
"hidpi": "1",
"os": "Mac"
}
]
}
},
    {
      "args": [
        "gpu_process",
@@ -1558,6 +1674,30 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "1002:6821",
"hidpi": "1",
"os": "Mac"
}
]
}
},
    {
      "args": [
        "gpu_process",
@@ -1848,6 +1988,29 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:104a",
"os": "Windows-2008ServerR2-SP1"
}
]
}
},
    {
      "args": [
        "gpu_process",
@@ -2146,6 +2309,29 @@
        ]
      }
    },
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"name": "depth_capture_tests",
"override_compile_targets": [
"telemetry_gpu_integration_test_run"
],
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:104a",
"os": "Windows-2008ServerR2-SP1"
}
]
}
},
    {
      "args": [
        "gpu_process",
...
@@ -224,6 +224,26 @@ bool HTMLVideoElement::copyVideoTextureToPlatformTexture(
      gl, texture, internalFormat, type, premultiplyAlpha, flipY);
}
bool HTMLVideoElement::texImageImpl(
WebMediaPlayer::TexImageFunctionID functionID,
GLenum target,
gpu::gles2::GLES2Interface* gl,
GLint level,
GLint internalformat,
GLenum format,
GLenum type,
GLint xoffset,
GLint yoffset,
GLint zoffset,
bool flipY,
bool premultiplyAlpha) {
if (!webMediaPlayer())
return false;
return webMediaPlayer()->texImageImpl(
functionID, target, gl, level, internalformat, format, type, xoffset,
yoffset, zoffset, flipY, premultiplyAlpha);
}
bool HTMLVideoElement::hasAvailableVideoFrame() const {
  if (!webMediaPlayer())
    return false;
...
@@ -79,6 +79,20 @@ class CORE_EXPORT HTMLVideoElement final : public HTMLMediaElement,
                    bool premultiplyAlpha,
                    bool flipY);
// Used by WebGL to do CPU-GPU texture upload if possible.
bool texImageImpl(WebMediaPlayer::TexImageFunctionID,
GLenum target,
gpu::gles2::GLES2Interface*,
GLint level,
GLint internalformat,
GLenum format,
GLenum type,
GLint xoffset,
GLint yoffset,
GLint zoffset,
bool flipY,
bool premultiplyAlpha);
  bool shouldDisplayPosterImage() const { return getDisplayMode() == Poster; }

  bool hasAvailableVideoFrame() const;
...
@@ -505,6 +505,28 @@ class ScopedFramebufferRestorer {
  Member<WebGLRenderingContextBase> m_context;
};
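
// RAII helper that resets the pixel unpack parameters in its constructor and
// restores them in its destructor, so early returns cannot leak modified
// unpack state.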
class ScopedUnpackParametersResetRestore {
STACK_ALLOCATED();
public:
explicit ScopedUnpackParametersResetRestore(
WebGLRenderingContextBase* context,
bool enabled = true)
: m_context(context), m_enabled(enabled) {
if (enabled)
m_context->resetUnpackParameters();
}
~ScopedUnpackParametersResetRestore() {
if (m_enabled)
m_context->restoreUnpackParameters();
}
private:
Member<WebGLRenderingContextBase> m_context;
bool m_enabled;
};
static void formatWebGLStatusString(const StringView& glInfo,
                                    const StringView& infoString,
                                    StringBuilder& builder) {
@@ -4394,7 +4416,7 @@ void WebGLRenderingContextBase::texImageImpl(
     }
   }
-  resetUnpackParameters();
+  ScopedUnpackParametersResetRestore temporaryResetUnpack(this);
   if (functionID == TexImage2D) {
     texImage2DBase(target, level, internalformat,
                    adjustedSourceImageRect.width(),
@@ -4420,7 +4442,6 @@ void WebGLRenderingContextBase::texImageImpl(
       depth, format, type, needConversion ? data.data() : imagePixelData);
     }
   }
-  restoreUnpackParameters();
 }
bool WebGLRenderingContextBase::validateTexFunc(
@@ -4627,16 +4648,14 @@ void WebGLRenderingContextBase::texImageHelperDOMArrayBufferView(
     return;
   }
-  if (changeUnpackAlignment)
-    resetUnpackParameters();
+  ScopedUnpackParametersResetRestore temporaryResetUnpack(
+      this, changeUnpackAlignment);
   if (functionID == TexImage2D)
     texImage2DBase(target, level, internalformat, width, height, border, format,
                    type, data);
   else if (functionID == TexSubImage2D)
     contextGL()->TexSubImage2D(target, level, xoffset, yoffset, width, height,
                                format, type, data);
-  if (changeUnpackAlignment)
-    restoreUnpackParameters();
 }
void WebGLRenderingContextBase::texImage2D(GLenum target,
@@ -4724,7 +4743,7 @@ void WebGLRenderingContextBase::texImageHelperImageData(
       return;
     }
   }
-  resetUnpackParameters();
+  ScopedUnpackParametersResetRestore temporaryResetUnpack(this);
   const uint8_t* bytes = needConversion ? data.data() : pixels->data()->data();
   if (functionID == TexImage2D) {
     DCHECK_EQ(unpackImageHeight, 0);
@@ -4753,7 +4772,6 @@ void WebGLRenderingContextBase::texImageHelperImageData(
         depth, format, type, bytes);
     }
   }
-  restoreUnpackParameters();
 }
void WebGLRenderingContextBase::texImage2D(GLenum target,
@@ -5141,19 +5159,18 @@ void WebGLRenderingContextBase::texImageHelperHTMLVideoElement(
        ImageBuffer::create(std::move(surface)));
    if (imageBuffer) {
      // The video element paints an RGBA frame into our surface here. By
      // using an AcceleratedImageBufferSurface, we enable the WebMediaPlayer
      // implementation to do any necessary color space conversion on the GPU
      // (though it may still do a CPU conversion and upload the results).
      video->paintCurrentFrame(
          imageBuffer->canvas(),
          IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);

      // This is a straight GPU-GPU copy, any necessary color space conversion
      // was handled in the paintCurrentFrameInContext() call.
      // Note that copyToPlatformTexture no longer allocates the destination
      // texture.
      texImage2DBase(target, level, internalformat, video->videoWidth(),
                     video->videoHeight(), 0, format, type, nullptr);
@@ -5167,6 +5184,20 @@ void WebGLRenderingContextBase::texImageHelperHTMLVideoElement(
    }
  }
if (sourceImageRectIsDefault) {
    // Try using an optimized CPU-GPU path for some formats, e.g. Y16 and Y8.
    // It returns early for other formats or if the frame is stored on the GPU.
    ScopedUnpackParametersResetRestore temporaryResetUnpack(
        this, m_unpackFlipY || m_unpackPremultiplyAlpha);
if (video->texImageImpl(
static_cast<WebMediaPlayer::TexImageFunctionID>(functionID), target,
contextGL(), level, convertTexInternalFormat(internalformat, type),
format, type, xoffset, yoffset, zoffset, m_unpackFlipY,
m_unpackPremultiplyAlpha &&
m_unpackColorspaceConversion == GL_NONE))
return;
}
  RefPtr<Image> image = videoFrameToImage(video);
  if (!image)
    return;
@@ -5308,7 +5339,7 @@ void WebGLRenderingContextBase::texImageHelperImageBitmap(
       return;
     }
   }
-  resetUnpackParameters();
+  ScopedUnpackParametersResetRestore temporaryResetUnpack(this);
   if (functionID == TexImage2D) {
     texImage2DBase(target, level, internalformat, width, height, 0, format,
                    type, needConversion ? data.data() : pixelDataPtr);
@@ -5326,7 +5357,6 @@ void WebGLRenderingContextBase::texImageHelperImageBitmap(
                                height, depth, format, type,
                                needConversion ? data.data() : pixelDataPtr);
   }
-  restoreUnpackParameters();
 }
void WebGLRenderingContextBase::texImage2D(GLenum target,
...
@@ -605,6 +605,7 @@ class MODULES_EXPORT WebGLRenderingContextBase : public CanvasRenderingContext,
  friend class ScopedFramebufferRestorer;
  // To allow V8WebGL[2]RenderingContext to call visitChildDOMWrappers.
  friend class V8WebGLRenderingContext;
  friend class ScopedUnpackParametersResetRestore;

  WebGLRenderingContextBase(HTMLCanvasElement*,
                            std::unique_ptr<WebGraphicsContext3DProvider>,
@@ -971,6 +972,8 @@ class MODULES_EXPORT WebGLRenderingContextBase : public CanvasRenderingContext,
    CopyTexImage,
    CompressedTexImage
  };

  // This must stay in sync with WebMediaPlayer::TexImageFunctionID.
  enum TexImageFunctionID {
    TexImage2D,
    TexSubImage2D,
...
@@ -105,6 +105,14 @@ class WebMediaPlayer {
  typedef WebString TrackId;
  enum TrackType { TextTrack, AudioTrack, VideoTrack };
// This must stay in sync with WebGLRenderingContextBase::TexImageFunctionID.
enum TexImageFunctionID {
TexImage2D,
TexSubImage2D,
TexImage3D,
TexSubImage3D
};
  virtual ~WebMediaPlayer() {}

  virtual void load(LoadType, const WebMediaPlayerSource&, CORSMode) = 0;
@@ -208,6 +216,30 @@ class WebMediaPlayer {
    return false;
  }
  // Does tex(Sub)Image2D/3D for the current frame. Returns false if it is not
  // implemented for the given parameters or if it fails.
  // The method wraps calls to glTexImage2D, glTexSubImage2D, glTexImage3D and
  // glTexSubImage3D; the parameters have the same names and meanings.
  // The texture needs to be created and bound to the active texture unit
  // before this call. In addition, TexSubImage2D and TexSubImage3D require
  // that previous TexImage2D and TexImage3D calls, respectively, defined the
  // texture content.
virtual bool texImageImpl(TexImageFunctionID functionID,
unsigned target,
gpu::gles2::GLES2Interface* gl,
int level,
int internalformat,
unsigned format,
unsigned type,
int xoffset,
int yoffset,
int zoffset,
bool flipY,
bool premultiplyAlpha) {
return false;
}
  virtual WebAudioSourceProvider* getAudioSourceProvider() { return nullptr; }

  virtual void setContentDecryptionModule(
...