Commit dc289335 authored by zmo@chromium.org

Fix a bug in gpu_unittests mock expectations for VAO.

I noticed this bug while trying to fix the WebGL VAO-related regressions
on Linux AMD.

Before this change, the generated ID was never set, so it defaulted to 0,
which is wrong because 0 is reserved for unbinding rather than binding.
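
To make the fix concrete, here is a minimal sketch of the old and new gmock
expectations, mirroring the change to gles2_cmd_decoder_unittest_base.cc in
the diff below (not standalone code: gl_, kServiceVertexArrayId, and the
::testing using-declarations come from the test fixture):

    // Old expectation: the out-parameter of GenVertexArraysOES is never
    // written, so the generated ID stays at its default of 0, the reserved
    // "unbind" name.
    EXPECT_CALL(*gl_, GenVertexArraysOES(1, _)).Times(1).RetiresOnSaturation();

    // New expectation: SetArgumentPointee<1> writes a real (nonzero) service
    // ID into the second argument, so the subsequent bind uses a valid VAO.
    EXPECT_CALL(*gl_, GenVertexArraysOES(1, _))
        .WillOnce(SetArgumentPointee<1>(kServiceVertexArrayId))
        .RetiresOnSaturation();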

Also, turn on VAO support for the majority of tests, as the emulated code path
is the more isolated one and its tests are likewise more isolated.  I feel it's
better to test with VAO on by default.
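
As a usage sketch (names taken from the fixtures in the diff below), a test
that still needs the emulated code path now opts out explicitly in its SetUp,
while every other fixture gets native VAO support by default:

    InitState init;
    init.gl_version = "3.0";
    init.bind_generates_resource = true;
    init.use_native_vao = false;  // exercise the emulated VAO path
    InitDecoder(init);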

BUG=373974
TEST=gpu_unittests
R=vmiura@chromium.org,kbr@chromium.org,piman@chromium.org,jbauman@chromium.org

Review URL: https://codereview.chromium.org/303543005

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@273123 0039d316-1c4b-4281-b951-d872f2087c98
parent a02f8d06
@@ -236,7 +236,6 @@ class GLES2DecoderVertexArraysOESTest : public GLES2DecoderWithShaderTest {
virtual void SetUp() {
InitState init;
init.extensions = "GL_OES_vertex_array_object";
init.gl_version = "opengl es 2.0";
init.bind_generates_resource = true;
InitDecoder(init);
@@ -361,6 +360,7 @@ class GLES2DecoderEmulatedVertexArraysOESTest
InitState init;
init.gl_version = "3.0";
init.bind_generates_resource = true;
init.use_native_vao = false;
InitDecoder(init);
SetupDefaultProgram();
......
@@ -36,6 +36,44 @@ using ::testing::SetArgumentPointee;
using ::testing::StrEq;
using ::testing::StrictMock;
namespace {
void NormalizeInitState(gpu::gles2::GLES2DecoderTestBase::InitState* init) {
CHECK(init);
const char* kVAOExtensions[] = {
"GL_OES_vertex_array_object",
"GL_ARB_vertex_array_object",
"GL_APPLE_vertex_array_object"
};
bool contains_vao_extension = false;
for (size_t ii = 0; ii < arraysize(kVAOExtensions); ++ii) {
if (init->extensions.find(kVAOExtensions[ii]) != std::string::npos) {
contains_vao_extension = true;
break;
}
}
if (init->use_native_vao) {
if (contains_vao_extension)
return;
if (!init->extensions.empty())
init->extensions += " ";
if (StartsWithASCII(init->gl_version, "opengl es", false)) {
init->extensions += kVAOExtensions[0];
} else {
#if !defined(OS_MACOSX)
init->extensions += kVAOExtensions[1];
#else
init->extensions += kVAOExtensions[2];
#endif // OS_MACOSX
}
} else {
// Make sure we don't set up an invalid InitState.
CHECK(!contains_vao_extension);
}
}
} // namespace Anonymous
namespace gpu {
namespace gles2 {
@@ -94,7 +132,8 @@ GLES2DecoderTestBase::InitState::InitState()
request_depth(false),
request_stencil(false),
bind_generates_resource(false),
lose_context_when_out_of_memory(false) {
lose_context_when_out_of_memory(false),
use_native_vao(true) {
}
void GLES2DecoderTestBase::InitDecoder(const InitState& init) {
@@ -104,6 +143,8 @@ void GLES2DecoderTestBase::InitDecoder(const InitState& init) {
void GLES2DecoderTestBase::InitDecoderWithCommandLine(
const InitState& init,
const base::CommandLine* command_line) {
InitState normalized_init = init;
NormalizeInitState(&normalized_init);
Framebuffer::ClearFramebufferCompleteComboMap();
gfx::SetGLGetProcAddressProc(gfx::MockGLInterface::GetGLProcAddress);
@@ -114,7 +155,7 @@ void GLES2DecoderTestBase::InitDecoderWithCommandLine(
// Only create stream texture manager if extension is requested.
std::vector<std::string> list;
base::SplitString(init.extensions, ' ', &list);
base::SplitString(normalized_init.extensions, ' ', &list);
scoped_refptr<FeatureInfo> feature_info;
if (command_line)
feature_info = new FeatureInfo(*command_line);
@@ -124,8 +165,8 @@ void GLES2DecoderTestBase::InitDecoderWithCommandLine(
memory_tracker_,
new ShaderTranslatorCache,
feature_info.get(),
init.bind_generates_resource));
bool use_default_textures = init.bind_generates_resource;
normalized_init.bind_generates_resource));
bool use_default_textures = normalized_init.bind_generates_resource;
InSequence sequence;
@@ -136,17 +177,18 @@ void GLES2DecoderTestBase::InitDecoderWithCommandLine(
// in turn initialize FeatureInfo, which needs a context to determine
// extension support.
context_ = new gfx::GLContextStubWithExtensions;
context_->AddExtensionsString(init.extensions.c_str());
context_->SetGLVersionString(init.gl_version.c_str());
context_->AddExtensionsString(normalized_init.extensions.c_str());
context_->SetGLVersionString(normalized_init.gl_version.c_str());
context_->MakeCurrent(surface_.get());
gfx::GLSurface::InitializeDynamicMockBindingsForTests(context_);
TestHelper::SetupContextGroupInitExpectations(gl_.get(),
DisallowedFeatures(),
init.extensions.c_str(),
init.gl_version.c_str(),
init.bind_generates_resource);
TestHelper::SetupContextGroupInitExpectations(
gl_.get(),
DisallowedFeatures(),
normalized_init.extensions.c_str(),
normalized_init.gl_version.c_str(),
normalized_init.bind_generates_resource);
// We initialize the ContextGroup with a MockGLES2Decoder so that
// we can use the ContextGroup to figure out how the real GLES2Decoder
@@ -156,7 +198,9 @@ void GLES2DecoderTestBase::InitDecoderWithCommandLine(
group_->Initialize(mock_decoder_.get(), DisallowedFeatures()));
if (group_->feature_info()->feature_flags().native_vertex_array_object) {
EXPECT_CALL(*gl_, GenVertexArraysOES(1, _)).Times(1).RetiresOnSaturation();
EXPECT_CALL(*gl_, GenVertexArraysOES(1, _))
.WillOnce(SetArgumentPointee<1>(kServiceVertexArrayId))
.RetiresOnSaturation();
EXPECT_CALL(*gl_, BindVertexArrayOES(_)).Times(1).RetiresOnSaturation();
}
@@ -239,13 +283,13 @@ void GLES2DecoderTestBase::InitDecoderWithCommandLine(
.Times(1)
.RetiresOnSaturation();
EXPECT_CALL(*gl_, GetIntegerv(GL_ALPHA_BITS, _))
.WillOnce(SetArgumentPointee<1>(init.has_alpha ? 8 : 0))
.WillOnce(SetArgumentPointee<1>(normalized_init.has_alpha ? 8 : 0))
.RetiresOnSaturation();
EXPECT_CALL(*gl_, GetIntegerv(GL_DEPTH_BITS, _))
.WillOnce(SetArgumentPointee<1>(init.has_depth ? 24 : 0))
.WillOnce(SetArgumentPointee<1>(normalized_init.has_depth ? 24 : 0))
.RetiresOnSaturation();
EXPECT_CALL(*gl_, GetIntegerv(GL_STENCIL_BITS, _))
.WillOnce(SetArgumentPointee<1>(init.has_stencil ? 8 : 0))
.WillOnce(SetArgumentPointee<1>(normalized_init.has_stencil ? 8 : 0))
.RetiresOnSaturation();
EXPECT_CALL(*gl_, Enable(GL_VERTEX_PROGRAM_POINT_SIZE))
@@ -305,14 +349,15 @@ void GLES2DecoderTestBase::InitDecoderWithCommandLine(
static const int32 kLoseContextWhenOutOfMemory = 0x10003;
int32 attributes[] = {EGL_ALPHA_SIZE,
init.request_alpha ? 8 : 0,
EGL_DEPTH_SIZE,
init.request_depth ? 24 : 0,
EGL_STENCIL_SIZE,
init.request_stencil ? 8 : 0,
kLoseContextWhenOutOfMemory,
init.lose_context_when_out_of_memory ? 1 : 0, };
int32 attributes[] = {
EGL_ALPHA_SIZE,
normalized_init.request_alpha ? 8 : 0,
EGL_DEPTH_SIZE,
normalized_init.request_depth ? 24 : 0,
EGL_STENCIL_SIZE,
normalized_init.request_stencil ? 8 : 0,
kLoseContextWhenOutOfMemory,
normalized_init.lose_context_when_out_of_memory ? 1 : 0, };
std::vector<int32> attribs(attributes, attributes + arraysize(attributes));
decoder_.reset(GLES2Decoder::Create(group_.get()));
@@ -364,6 +409,11 @@ void GLES2DecoderTestBase::ResetDecoder() {
EXPECT_CALL(*gl_, DeleteBuffersARB(1, _))
.Times(2)
.RetiresOnSaturation();
if (group_->feature_info()->feature_flags().native_vertex_array_object) {
EXPECT_CALL(*gl_, DeleteVertexArraysOES(1, Pointee(kServiceVertexArrayId)))
.Times(1)
.RetiresOnSaturation();
}
decoder_->EndDecoding();
decoder_->Destroy(true);
......
@@ -164,6 +164,7 @@ class GLES2DecoderTestBase : public ::testing::TestWithParam<bool> {
bool request_stencil;
bool bind_generates_resource;
bool lose_context_when_out_of_memory;
bool use_native_vao; // default is true.
};
void InitDecoder(const InitState& init);
......