Commit 721937da authored by Kenneth Russell, committed by Commit Bot

Address comments from zmo on an earlier CL.

Address zmo's comments on the earlier change,
https://chromium-review.googlesource.com/1885636,
which were published after that CL landed.

Bug: 681341
Change-Id: Ib89967cc4f6b7f11371e7694689445d8afc34883
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1977739
Commit-Queue: Kenneth Russell <kbr@chromium.org>
Auto-Submit: Kenneth Russell <kbr@chromium.org>
Reviewed-by: Zhenyao Mo <zmo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#726997}
parent 0ecee86d
@@ -879,7 +879,15 @@ void GpuDataManagerImplPrivate::HandleGpuSwitch() {
 bool GpuDataManagerImplPrivate::UpdateActiveGpu(uint32_t vendor_id,
                                                 uint32_t device_id) {
   // Heuristics for dual-GPU detection.
+#if defined(OS_WIN)
+  // On Windows, "Microsoft Basic Render Driver" now shows up as a
+  // secondary GPU.
+  bool is_dual_gpu = gpu_info_.secondary_gpus.size() == 2;
+#else
   bool is_dual_gpu = gpu_info_.secondary_gpus.size() == 1;
+#endif
+  // TODO(kbr/zmo): on Windows, at least, it's now possible to have a
+  // system with both low-power and high-performance GPUs from AMD.
   const uint32_t kIntelID = 0x8086;
   bool saw_intel_gpu = false;
   bool saw_non_intel_gpu = false;
...
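For context on the hunk above: the only behavioral change is how a "dual-GPU" system is detected. The following is a minimal standalone sketch of that platform split; GpuDeviceSketch, GpuInfoSketch, and IsDualGpu are hypothetical stand-ins for Chromium's gpu::GPUInfo and the logic inside UpdateActiveGpu, not the real implementation.

// Minimal sketch only; GpuDeviceSketch/GpuInfoSketch/IsDualGpu are
// hypothetical stand-ins for gpu::GPUInfo and UpdateActiveGpu.
#include <cstdint>
#include <vector>

struct GpuDeviceSketch {
  uint32_t vendor_id = 0;
  uint32_t device_id = 0;
};

struct GpuInfoSketch {
  std::vector<GpuDeviceSketch> secondary_gpus;
};

bool IsDualGpu(const GpuInfoSketch& gpu_info) {
#if defined(OS_WIN)
  // On Windows the "Microsoft Basic Render Driver" is enumerated as an
  // extra secondary GPU, so a dual-GPU machine reports two secondary entries.
  return gpu_info.secondary_gpus.size() == 2;
#else
  // Elsewhere a dual-GPU machine reports exactly one secondary GPU.
  return gpu_info.secondary_gpus.size() == 1;
#endif
}

The TODO in the hunk notes that this count-based heuristic (and the Intel/non-Intel split that follows it in the real function) does not cover Windows machines whose low-power and high-performance GPUs both come from AMD.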
@@ -730,7 +730,7 @@ void GpuMessageHandler::OnGpuInfoUpdate() {
                    *(gpu_info_val.get()));
 }
 
-void GpuMessageHandler::OnGpuSwitched(gl::GpuPreference active_gpu_heuristic_) {
+void GpuMessageHandler::OnGpuSwitched(gl::GpuPreference active_gpu_heuristic) {
   // Currently, about:gpu page does not update GPU info after the GPU switch.
   // If there is something to be updated, the code should be added here.
 }
...
@@ -64,7 +64,6 @@
 #include "third_party/skia/include/core/SkSurface.h"
 #include "third_party/skia/include/gpu/GrContext.h"
 #include "third_party/skia/include/gpu/gl/GrGLTypes.h"
-#include "ui/gl/gpu_preference.h"
 #include "v8/include/v8.h"
 
 namespace blink {
...
@@ -611,7 +611,7 @@ class PLATFORM_EXPORT DrawingBuffer : public cc::TextureLayerClient,
   bool opengl_flip_y_extension_;
 
-  gl::GpuPreference initial_gpu_;
+  const gl::GpuPreference initial_gpu_;
   gl::GpuPreference current_active_gpu_;
 
   DISALLOW_COPY_AND_ASSIGN(DrawingBuffer);
...
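The drawing_buffer.h hunk only adds const to initial_gpu_. As a rough illustration of what that buys, under stated assumptions: the GpuPreferenceSketch enum and DrawingBufferSketch class below are hypothetical stand-ins, not gl::GpuPreference or blink::DrawingBuffer. A const member must be set in the constructor's initializer list and can never be reassigned, which documents that the GPU preference chosen at creation stays fixed, while current_active_gpu_ remains assignable when the active GPU switches.

// Hypothetical sketch; GpuPreferenceSketch and DrawingBufferSketch are
// stand-ins, not gl::GpuPreference or blink::DrawingBuffer.
enum class GpuPreferenceSketch { kDefault, kLowPower, kHighPerformance };

class DrawingBufferSketch {
 public:
  explicit DrawingBufferSketch(GpuPreferenceSketch initial_gpu)
      : initial_gpu_(initial_gpu), current_active_gpu_(initial_gpu) {}

  void OnGpuSwitched(GpuPreferenceSketch active_gpu) {
    current_active_gpu_ = active_gpu;  // fine: non-const member
    // initial_gpu_ = active_gpu;      // would not compile: const member
  }

 private:
  const GpuPreferenceSketch initial_gpu_;   // fixed at construction
  GpuPreferenceSketch current_active_gpu_;  // tracks later GPU switches
};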