Byte-swap the video frame pixels before passing them to Java.

When a complete video frame is decoded, this CL converts the pixels from
BGRA to a format suitable for loading into a Java Bitmap directly. This
removes the need to create a temporary int[] array in Java.

Review URL: https://codereview.chromium.org/23677011

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@226405 0039d316-1c4b-4281-b951-d872f2087c98
parent 77787cf1
...@@ -142,6 +142,9 @@ public class JniInterface { ...@@ -142,6 +142,9 @@ public class JniInterface {
/** Screen height of the video feed. */ /** Screen height of the video feed. */
private static int sHeight = 0; private static int sHeight = 0;
/** Bitmap holding the latest screen image. */
private static Bitmap sBitmap = null;
/** Buffer holding the video feed. */ /** Buffer holding the video feed. */
private static ByteBuffer sBuffer = null; private static ByteBuffer sBuffer = null;
...@@ -310,12 +313,19 @@ public class JniInterface { ...@@ -310,12 +313,19 @@ public class JniInterface {
return null; return null;
} }
int[] frame = new int[sWidth * sHeight]; // This is synchronized only to silence a findbugs warning about incorrect initialization of
// |sBitmap|.
sBuffer.order(ByteOrder.LITTLE_ENDIAN); // TODO(lambroslambrou): Annotate this class as @NotThreadSafe to prevent similar warnings
sBuffer.asIntBuffer().get(frame, 0, frame.length); // in future.
synchronized (JniInterface.class) {
if (sBitmap == null || sBitmap.getWidth() != sWidth || sBitmap.getHeight() != sHeight) {
sBitmap = Bitmap.createBitmap(sWidth, sHeight, Bitmap.Config.ARGB_8888);
}
}
return Bitmap.createBitmap(frame, 0, sWidth, sWidth, sHeight, Bitmap.Config.ARGB_8888); sBuffer.rewind();
sBitmap.copyPixelsFromBuffer(sBuffer);
return sBitmap;
} }
/** /**
......
...@@ -19,6 +19,13 @@ namespace remoting { ...@@ -19,6 +19,13 @@ namespace remoting {
class FrameConsumer { class FrameConsumer {
public: public:
// List of supported pixel formats needed by various platforms.
enum PixelFormat {
FORMAT_BGRA, // Used by the Pepper plugin.
FORMAT_RGBA, // Used for Android's Bitmap class.
};
// Accepts a buffer to be painted to the screen. The buffer's dimensions and // Accepts a buffer to be painted to the screen. The buffer's dimensions and
// relative position within the frame are specified by |clip_area|. Only // relative position within the frame are specified by |clip_area|. Only
// pixels falling within |region| and the current clipping area are painted. // pixels falling within |region| and the current clipping area are painted.
...@@ -41,6 +48,9 @@ class FrameConsumer { ...@@ -41,6 +48,9 @@ class FrameConsumer {
virtual void SetSourceSize(const webrtc::DesktopSize& source_size, virtual void SetSourceSize(const webrtc::DesktopSize& source_size,
const webrtc::DesktopVector& dpi) = 0; const webrtc::DesktopVector& dpi) = 0;
// Returns the preferred pixel encoding for the platform.
virtual PixelFormat GetPixelFormat() = 0;
protected: protected:
FrameConsumer() {} FrameConsumer() {}
virtual ~FrameConsumer() {} virtual ~FrameConsumer() {}
......
...@@ -14,8 +14,11 @@ ...@@ -14,8 +14,11 @@
namespace remoting { namespace remoting {
FrameConsumerProxy::FrameConsumerProxy( FrameConsumerProxy::FrameConsumerProxy(
scoped_refptr<base::SingleThreadTaskRunner> task_runner) scoped_refptr<base::SingleThreadTaskRunner> task_runner,
: task_runner_(task_runner) { const base::WeakPtr<FrameConsumer>& frame_consumer)
: frame_consumer_(frame_consumer),
task_runner_(task_runner) {
pixel_format_ = frame_consumer_->GetPixelFormat();
} }
void FrameConsumerProxy::ApplyBuffer(const webrtc::DesktopSize& view_size, void FrameConsumerProxy::ApplyBuffer(const webrtc::DesktopSize& view_size,
...@@ -57,11 +60,8 @@ void FrameConsumerProxy::SetSourceSize( ...@@ -57,11 +60,8 @@ void FrameConsumerProxy::SetSourceSize(
frame_consumer_->SetSourceSize(source_size, source_dpi); frame_consumer_->SetSourceSize(source_size, source_dpi);
} }
void FrameConsumerProxy::Attach( FrameConsumer::PixelFormat FrameConsumerProxy::GetPixelFormat() {
const base::WeakPtr<FrameConsumer>& frame_consumer) { return pixel_format_;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(frame_consumer_.get() == NULL);
frame_consumer_ = frame_consumer;
} }
FrameConsumerProxy::~FrameConsumerProxy() { FrameConsumerProxy::~FrameConsumerProxy() {
......
...@@ -26,7 +26,8 @@ class FrameConsumerProxy ...@@ -26,7 +26,8 @@ class FrameConsumerProxy
public: public:
// Constructs a proxy for |frame_consumer| which will trampoline invocations // Constructs a proxy for |frame_consumer| which will trampoline invocations
// to |frame_consumer_message_loop|. // to |frame_consumer_message_loop|.
FrameConsumerProxy(scoped_refptr<base::SingleThreadTaskRunner> task_runner); FrameConsumerProxy(scoped_refptr<base::SingleThreadTaskRunner> task_runner,
const base::WeakPtr<FrameConsumer>& frame_consumer);
// FrameConsumer implementation. // FrameConsumer implementation.
virtual void ApplyBuffer(const webrtc::DesktopSize& view_size, virtual void ApplyBuffer(const webrtc::DesktopSize& view_size,
...@@ -36,10 +37,7 @@ class FrameConsumerProxy ...@@ -36,10 +37,7 @@ class FrameConsumerProxy
virtual void ReturnBuffer(webrtc::DesktopFrame* buffer) OVERRIDE; virtual void ReturnBuffer(webrtc::DesktopFrame* buffer) OVERRIDE;
virtual void SetSourceSize(const webrtc::DesktopSize& source_size, virtual void SetSourceSize(const webrtc::DesktopSize& source_size,
const webrtc::DesktopVector& dpi) OVERRIDE; const webrtc::DesktopVector& dpi) OVERRIDE;
virtual PixelFormat GetPixelFormat() OVERRIDE;
// Attaches to |frame_consumer_|.
// This must only be called from |frame_consumer_message_loop_|.
void Attach(const base::WeakPtr<FrameConsumer>& frame_consumer);
private: private:
friend class base::RefCountedThreadSafe<FrameConsumerProxy>; friend class base::RefCountedThreadSafe<FrameConsumerProxy>;
...@@ -48,6 +46,8 @@ class FrameConsumerProxy ...@@ -48,6 +46,8 @@ class FrameConsumerProxy
base::WeakPtr<FrameConsumer> frame_consumer_; base::WeakPtr<FrameConsumer> frame_consumer_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_; scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
PixelFormat pixel_format_;
DISALLOW_COPY_AND_ASSIGN(FrameConsumerProxy); DISALLOW_COPY_AND_ASSIGN(FrameConsumerProxy);
}; };
......
...@@ -228,11 +228,11 @@ void ChromotingJniInstance::SetCursorShape( ...@@ -228,11 +228,11 @@ void ChromotingJniInstance::SetCursorShape(
void ChromotingJniInstance::ConnectToHostOnDisplayThread() { void ChromotingJniInstance::ConnectToHostOnDisplayThread() {
DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread()); DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread());
frame_consumer_ = new FrameConsumerProxy(jni_runtime_->display_task_runner());
view_.reset(new JniFrameConsumer(jni_runtime_)); view_.reset(new JniFrameConsumer(jni_runtime_));
view_weak_factory_.reset(new base::WeakPtrFactory<JniFrameConsumer>( view_weak_factory_.reset(new base::WeakPtrFactory<JniFrameConsumer>(
view_.get())); view_.get()));
frame_consumer_->Attach(view_weak_factory_->GetWeakPtr()); frame_consumer_ = new FrameConsumerProxy(jni_runtime_->display_task_runner(),
view_weak_factory_->GetWeakPtr());
jni_runtime_->network_task_runner()->PostTask( jni_runtime_->network_task_runner()->PostTask(
FROM_HERE, FROM_HERE,
......
...@@ -109,6 +109,10 @@ void JniFrameConsumer::SetSourceSize(const webrtc::DesktopSize& source_size, ...@@ -109,6 +109,10 @@ void JniFrameConsumer::SetSourceSize(const webrtc::DesktopSize& source_size,
&JniFrameConsumer::AllocateBuffer, base::Unretained(this))); &JniFrameConsumer::AllocateBuffer, base::Unretained(this)));
} }
// Returns the pixel layout this consumer wants decoded frames in. RGBA is
// used here because the frames are handed to Android's Bitmap class (see
// FrameConsumer::PixelFormat).
FrameConsumer::PixelFormat JniFrameConsumer::GetPixelFormat() {
  return FORMAT_RGBA;
}
void JniFrameConsumer::AllocateBuffer() { void JniFrameConsumer::AllocateBuffer() {
// Only do anything if we're not being destructed. // Only do anything if we're not being destructed.
if (!in_dtor_) { if (!in_dtor_) {
......
...@@ -37,6 +37,7 @@ class JniFrameConsumer : public FrameConsumer { ...@@ -37,6 +37,7 @@ class JniFrameConsumer : public FrameConsumer {
virtual void ReturnBuffer(webrtc::DesktopFrame* buffer) OVERRIDE; virtual void ReturnBuffer(webrtc::DesktopFrame* buffer) OVERRIDE;
virtual void SetSourceSize(const webrtc::DesktopSize& source_size, virtual void SetSourceSize(const webrtc::DesktopSize& source_size,
const webrtc::DesktopVector& dpi) OVERRIDE; const webrtc::DesktopVector& dpi) OVERRIDE;
virtual PixelFormat GetPixelFormat() OVERRIDE;
private: private:
// Variables are to be used from the display thread. // Variables are to be used from the display thread.
......
...@@ -603,10 +603,16 @@ void ChromotingInstance::ConnectWithConfig(const ClientConfig& config, ...@@ -603,10 +603,16 @@ void ChromotingInstance::ConnectWithConfig(const ClientConfig& config,
jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
view_.reset(new PepperView(this, &context_));
view_weak_factory_.reset(
new base::WeakPtrFactory<FrameConsumer>(view_.get()));
// RectangleUpdateDecoder runs on a separate thread so for now we wrap // RectangleUpdateDecoder runs on a separate thread so for now we wrap
// PepperView with a ref-counted proxy object. // PepperView with a ref-counted proxy object.
scoped_refptr<FrameConsumerProxy> consumer_proxy = scoped_refptr<FrameConsumerProxy> consumer_proxy =
new FrameConsumerProxy(plugin_task_runner_); new FrameConsumerProxy(plugin_task_runner_,
view_weak_factory_->GetWeakPtr());
host_connection_.reset(new protocol::ConnectionToHost(true)); host_connection_.reset(new protocol::ConnectionToHost(true));
scoped_ptr<AudioPlayer> audio_player(new PepperAudioPlayer(this)); scoped_ptr<AudioPlayer> audio_player(new PepperAudioPlayer(this));
...@@ -614,10 +620,8 @@ void ChromotingInstance::ConnectWithConfig(const ClientConfig& config, ...@@ -614,10 +620,8 @@ void ChromotingInstance::ConnectWithConfig(const ClientConfig& config,
host_connection_.get(), this, host_connection_.get(), this,
consumer_proxy, audio_player.Pass())); consumer_proxy, audio_player.Pass()));
view_.reset(new PepperView(this, &context_, client_->GetFrameProducer())); view_->Initialize(client_->GetFrameProducer());
view_weak_factory_.reset(
new base::WeakPtrFactory<FrameConsumer>(view_.get()));
consumer_proxy->Attach(view_weak_factory_->GetWeakPtr());
if (!plugin_view_.is_null()) { if (!plugin_view_.is_null()) {
view_->SetView(plugin_view_); view_->SetView(plugin_view_);
} }
......
...@@ -61,11 +61,10 @@ const size_t kMaxPendingBuffersCount = 2; ...@@ -61,11 +61,10 @@ const size_t kMaxPendingBuffersCount = 2;
} // namespace } // namespace
PepperView::PepperView(ChromotingInstance* instance, PepperView::PepperView(ChromotingInstance* instance,
ClientContext* context, ClientContext* context)
FrameProducer* producer)
: instance_(instance), : instance_(instance),
context_(context), context_(context),
producer_(producer), producer_(NULL),
merge_buffer_(NULL), merge_buffer_(NULL),
dips_to_device_scale_(1.0f), dips_to_device_scale_(1.0f),
dips_to_view_scale_(1.0f), dips_to_view_scale_(1.0f),
...@@ -73,7 +72,6 @@ PepperView::PepperView(ChromotingInstance* instance, ...@@ -73,7 +72,6 @@ PepperView::PepperView(ChromotingInstance* instance,
is_initialized_(false), is_initialized_(false),
frame_received_(false), frame_received_(false),
callback_factory_(this) { callback_factory_(this) {
InitiateDrawing();
} }
PepperView::~PepperView() { PepperView::~PepperView() {
...@@ -91,6 +89,15 @@ PepperView::~PepperView() { ...@@ -91,6 +89,15 @@ PepperView::~PepperView() {
} }
} }
// Stores |producer| and keeps handing it freshly allocated frame buffers to
// render into until the in-flight limit is reached (AllocateBuffer() returns
// NULL once kMaxPendingBuffersCount buffers are outstanding).
void PepperView::Initialize(FrameProducer* producer) {
  producer_ = producer;
  for (webrtc::DesktopFrame* frame = AllocateBuffer(); frame != NULL;
       frame = AllocateBuffer()) {
    producer_->DrawBuffer(frame);
  }
}
void PepperView::SetView(const pp::View& view) { void PepperView::SetView(const pp::View& view) {
bool view_changed = false; bool view_changed = false;
...@@ -150,7 +157,7 @@ void PepperView::SetView(const pp::View& view) { ...@@ -150,7 +157,7 @@ void PepperView::SetView(const pp::View& view) {
if (view_changed) { if (view_changed) {
producer_->SetOutputSizeAndClip(view_size_, clip_area_); producer_->SetOutputSizeAndClip(view_size_, clip_area_);
InitiateDrawing(); Initialize(producer_);
} }
} }
...@@ -171,7 +178,7 @@ void PepperView::ApplyBuffer(const webrtc::DesktopSize& view_size, ...@@ -171,7 +178,7 @@ void PepperView::ApplyBuffer(const webrtc::DesktopSize& view_size,
// the properly scaled data. // the properly scaled data.
if (!view_size_.equals(view_size)) { if (!view_size_.equals(view_size)) {
FreeBuffer(buffer); FreeBuffer(buffer);
InitiateDrawing(); Initialize(producer_);
} else { } else {
FlushBuffer(clip_area, buffer, region); FlushBuffer(clip_area, buffer, region);
} }
...@@ -187,7 +194,7 @@ void PepperView::ReturnBuffer(webrtc::DesktopFrame* buffer) { ...@@ -187,7 +194,7 @@ void PepperView::ReturnBuffer(webrtc::DesktopFrame* buffer) {
producer_->DrawBuffer(buffer); producer_->DrawBuffer(buffer);
} else { } else {
FreeBuffer(buffer); FreeBuffer(buffer);
InitiateDrawing(); Initialize(producer_);
} }
} }
...@@ -205,6 +212,10 @@ void PepperView::SetSourceSize(const webrtc::DesktopSize& source_size, ...@@ -205,6 +212,10 @@ void PepperView::SetSourceSize(const webrtc::DesktopSize& source_size,
instance_->SetDesktopSize(source_size, source_dpi); instance_->SetDesktopSize(source_size, source_dpi);
} }
// Returns the pixel layout this consumer wants decoded frames in. BGRA is
// used here because this view paints through the Pepper plugin (see
// FrameConsumer::PixelFormat).
FrameConsumer::PixelFormat PepperView::GetPixelFormat() {
  return FORMAT_BGRA;
}
webrtc::DesktopFrame* PepperView::AllocateBuffer() { webrtc::DesktopFrame* PepperView::AllocateBuffer() {
if (buffers_.size() >= kMaxPendingBuffersCount) if (buffers_.size() >= kMaxPendingBuffersCount)
return NULL; return NULL;
...@@ -235,14 +246,6 @@ void PepperView::FreeBuffer(webrtc::DesktopFrame* buffer) { ...@@ -235,14 +246,6 @@ void PepperView::FreeBuffer(webrtc::DesktopFrame* buffer) {
delete buffer; delete buffer;
} }
void PepperView::InitiateDrawing() {
webrtc::DesktopFrame* buffer = AllocateBuffer();
while (buffer) {
producer_->DrawBuffer(buffer);
buffer = AllocateBuffer();
}
}
void PepperView::FlushBuffer(const webrtc::DesktopRect& clip_area, void PepperView::FlushBuffer(const webrtc::DesktopRect& clip_area,
webrtc::DesktopFrame* buffer, webrtc::DesktopFrame* buffer,
const webrtc::DesktopRegion& region) { const webrtc::DesktopRegion& region) {
......
...@@ -35,13 +35,15 @@ class FrameProducer; ...@@ -35,13 +35,15 @@ class FrameProducer;
class PepperView : public FrameConsumer { class PepperView : public FrameConsumer {
public: public:
// Constructs a PepperView for the |instance|. The |instance|, |context| // Constructs a PepperView for the |instance|. The |instance| and |context|
// and |producer| must outlive this class. // must outlive this class.
PepperView(ChromotingInstance* instance, PepperView(ChromotingInstance* instance, ClientContext* context);
ClientContext* context,
FrameProducer* producer);
virtual ~PepperView(); virtual ~PepperView();
// Allocates buffers and passes them to the FrameProducer to render into until
// the maximum number of buffers are in-flight.
void Initialize(FrameProducer* producer);
// FrameConsumer implementation. // FrameConsumer implementation.
virtual void ApplyBuffer(const webrtc::DesktopSize& view_size, virtual void ApplyBuffer(const webrtc::DesktopSize& view_size,
const webrtc::DesktopRect& clip_area, const webrtc::DesktopRect& clip_area,
...@@ -50,6 +52,7 @@ class PepperView : public FrameConsumer { ...@@ -50,6 +52,7 @@ class PepperView : public FrameConsumer {
virtual void ReturnBuffer(webrtc::DesktopFrame* buffer) OVERRIDE; virtual void ReturnBuffer(webrtc::DesktopFrame* buffer) OVERRIDE;
virtual void SetSourceSize(const webrtc::DesktopSize& source_size, virtual void SetSourceSize(const webrtc::DesktopSize& source_size,
const webrtc::DesktopVector& dpi) OVERRIDE; const webrtc::DesktopVector& dpi) OVERRIDE;
virtual PixelFormat GetPixelFormat() OVERRIDE;
// Updates the PepperView's size & clipping area, taking into account the // Updates the PepperView's size & clipping area, taking into account the
// DIP-to-device scale factor. // DIP-to-device scale factor.
...@@ -74,10 +77,6 @@ class PepperView : public FrameConsumer { ...@@ -74,10 +77,6 @@ class PepperView : public FrameConsumer {
// Frees a frame buffer previously allocated by AllocateBuffer. // Frees a frame buffer previously allocated by AllocateBuffer.
void FreeBuffer(webrtc::DesktopFrame* buffer); void FreeBuffer(webrtc::DesktopFrame* buffer);
// Allocates buffers and passes them to the FrameProducer to render into until
// the maximum number of buffers are in-flight.
void InitiateDrawing();
// Renders the parts of |buffer| identified by |region| to the view. If the // Renders the parts of |buffer| identified by |region| to the view. If the
// clip area of the view has changed since the buffer was generated then // clip area of the view has changed since the buffer was generated then
// FrameProducer is supplied the missed parts of |region|. The FrameProducer // FrameProducer is supplied the missed parts of |region|. The FrameProducer
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
#include "remoting/codec/video_decoder_vp8.h" #include "remoting/codec/video_decoder_vp8.h"
#include "remoting/client/frame_consumer.h" #include "remoting/client/frame_consumer.h"
#include "remoting/protocol/session_config.h" #include "remoting/protocol/session_config.h"
#include "third_party/libyuv/include/libyuv/convert_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
using base::Passed; using base::Passed;
...@@ -24,6 +25,55 @@ using remoting::protocol::SessionConfig; ...@@ -24,6 +25,55 @@ using remoting::protocol::SessionConfig;
namespace remoting { namespace remoting {
// This class wraps a VideoDecoder and byte-swaps the pixels for compatibility
// with the android.graphics.Bitmap class.
// TODO(lambroslambrou): Refactor so that the VideoDecoder produces data
// in the right byte-order, instead of swapping it here.
class RgbToBgrVideoDecoderFilter : public VideoDecoder {
 public:
  // Takes ownership of |parent|, the decoder that does the actual decoding.
  // Marked explicit so a scoped_ptr<VideoDecoder> cannot silently convert
  // into this wrapper.
  explicit RgbToBgrVideoDecoderFilter(scoped_ptr<VideoDecoder> parent)
      : parent_(parent.Pass()) {
  }

  virtual ~RgbToBgrVideoDecoderFilter() {}

  virtual void Initialize(const webrtc::DesktopSize& screen_size) OVERRIDE {
    parent_->Initialize(screen_size);
  }

  virtual bool DecodePacket(const VideoPacket& packet) OVERRIDE {
    return parent_->DecodePacket(packet);
  }

  virtual void Invalidate(const webrtc::DesktopSize& view_size,
                          const webrtc::DesktopRegion& region) OVERRIDE {
    return parent_->Invalidate(view_size, region);
  }

  // Renders the frame via the wrapped decoder, then swaps the red and blue
  // channels of every pixel in |output_region| in place.
  virtual void RenderFrame(const webrtc::DesktopSize& view_size,
                           const webrtc::DesktopRect& clip_area,
                           uint8* image_buffer,
                           int image_stride,
                           webrtc::DesktopRegion* output_region) OVERRIDE {
    parent_->RenderFrame(view_size, clip_area, image_buffer, image_stride,
                         output_region);

    for (webrtc::DesktopRegion::Iterator i(*output_region); !i.IsAtEnd();
         i.Advance()) {
      webrtc::DesktopRect rect = i.rect();
      uint8* pixels = image_buffer + (rect.top() * image_stride) +
          (rect.left() * kBytesPerPixel);
      // ABGRToARGB performs an R<->B swap; source and destination are the
      // same buffer, converting the rectangle in place.
      libyuv::ABGRToARGB(pixels, image_stride, pixels, image_stride,
                         rect.width(), rect.height());
    }
  }

  virtual const webrtc::DesktopRegion* GetImageShape() OVERRIDE {
    return parent_->GetImageShape();
  }

 private:
  // The wrapped decoder; owned by this filter.
  scoped_ptr<VideoDecoder> parent_;
};
RectangleUpdateDecoder::RectangleUpdateDecoder( RectangleUpdateDecoder::RectangleUpdateDecoder(
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner, scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner, scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
...@@ -39,6 +89,13 @@ RectangleUpdateDecoder::~RectangleUpdateDecoder() { ...@@ -39,6 +89,13 @@ RectangleUpdateDecoder::~RectangleUpdateDecoder() {
} }
void RectangleUpdateDecoder::Initialize(const SessionConfig& config) { void RectangleUpdateDecoder::Initialize(const SessionConfig& config) {
if (!decode_task_runner_->BelongsToCurrentThread()) {
decode_task_runner_->PostTask(
FROM_HERE, base::Bind(&RectangleUpdateDecoder::Initialize, this,
config));
return;
}
// Initialize decoder based on the selected codec. // Initialize decoder based on the selected codec.
ChannelConfig::Codec codec = config.video_config().codec; ChannelConfig::Codec codec = config.video_config().codec;
if (codec == ChannelConfig::CODEC_VERBATIM) { if (codec == ChannelConfig::CODEC_VERBATIM) {
...@@ -48,6 +105,12 @@ void RectangleUpdateDecoder::Initialize(const SessionConfig& config) { ...@@ -48,6 +105,12 @@ void RectangleUpdateDecoder::Initialize(const SessionConfig& config) {
} else { } else {
NOTREACHED() << "Invalid Encoding found: " << codec; NOTREACHED() << "Invalid Encoding found: " << codec;
} }
if (consumer_->GetPixelFormat() == FrameConsumer::FORMAT_RGBA) {
scoped_ptr<VideoDecoder> wrapper(
new RgbToBgrVideoDecoderFilter(decoder_.Pass()));
decoder_ = wrapper.Pass();
}
} }
void RectangleUpdateDecoder::DecodePacket(scoped_ptr<VideoPacket> packet, void RectangleUpdateDecoder::DecodePacket(scoped_ptr<VideoPacket> packet,
......
...@@ -2542,6 +2542,7 @@ ...@@ -2542,6 +2542,7 @@
'remoting_base', 'remoting_base',
'remoting_jingle_glue', 'remoting_jingle_glue',
'remoting_protocol', 'remoting_protocol',
'../third_party/libyuv/libyuv.gyp:libyuv',
'../third_party/webrtc/modules/modules.gyp:desktop_capture', '../third_party/webrtc/modules/modules.gyp:desktop_capture',
], ],
'sources': [ 'sources': [
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment