Commit 206d44a2 authored by Yuwei Huang, committed by Commit Bot

[CRD iOS] Make ChromotingSession own the AudioStub

Previously, the AudioStub was owned by the (Remoting|Jni)Client class and
passed to ChromotingSession as a WeakPtr. The AudioStub is not used by the
*Client class after it is handed to ChromotingSession, so managing its
lifetime there is an unnecessary burden.

This CL fixes this by making ChromotingSession own the AudioStub.
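
The resulting ownership pattern, roughly: ChromotingSession takes the AudioStub
as a std::unique_ptr and builds a base::WeakPtrFactory over the raw pointer, so
ChromotingClient, which still expects a WeakPtr<AudioStub>, keeps working. A
minimal sketch of that pattern, not code from this CL (the class and member
names below are illustrative, and the real CL stores these fields in
ChromotingSession::SessionContext):

#include <memory>
#include <utility>

#include "base/memory/weak_ptr.h"
#include "remoting/protocol/audio_stub.h"

// Illustrative owner class (hypothetical name, not part of this CL).
class OwningSession {
 public:
  explicit OwningSession(
      std::unique_ptr<remoting::protocol::AudioStub> audio_stub)
      : audio_stub_(std::move(audio_stub)),
        audio_stub_weak_factory_(audio_stub_.get()) {}

  // Components that still take a WeakPtr<AudioStub> get one from here; the
  // pointers are invalidated when this object (and with it the stub) goes away.
  base::WeakPtr<remoting::protocol::AudioStub> audio_stub() {
    return audio_stub_weak_factory_.GetWeakPtr();
  }

 private:
  std::unique_ptr<remoting::protocol::AudioStub> audio_stub_;
  base::WeakPtrFactory<remoting::protocol::AudioStub> audio_stub_weak_factory_;
};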

Bug: 868088
Change-Id: I067f321b3aab7110df7bb85a8b8c519ea889865e
Reviewed-on: https://chromium-review.googlesource.com/1171840
Reviewed-by: Joe Downing <joedow@chromium.org>
Commit-Queue: Yuwei Huang <yuweih@chromium.org>
Cr-Commit-Position: refs/heads/master@{#582428}
parent 39e00c42
@@ -51,7 +51,7 @@ void AudioPlaybackStream::Core::ResetStreamFormat(

 AudioPlaybackStream::AudioPlaybackStream(
     std::unique_ptr<AudioPlaybackSink> audio_sink,
     scoped_refptr<base::SingleThreadTaskRunner> audio_task_runner)
-    : audio_task_runner_(audio_task_runner), weak_factory_(this) {
+    : audio_task_runner_(audio_task_runner) {
   DETACH_FROM_THREAD(thread_checker_);
   core_ = std::make_unique<Core>(std::move(audio_sink));
@@ -62,10 +62,6 @@ AudioPlaybackStream::~AudioPlaybackStream() {
   audio_task_runner_->DeleteSoon(FROM_HERE, core_.release());
 }

-base::WeakPtr<AudioPlaybackStream> AudioPlaybackStream::GetWeakPtr() {
-  return weak_factory_.GetWeakPtr();
-}
-
 void AudioPlaybackStream::ProcessAudioPacket(
     std::unique_ptr<AudioPacket> packet,
     const base::RepeatingClosure& done) {
...
@@ -37,8 +37,6 @@ class AudioPlaybackStream : public protocol::AudioStub {
   void ProcessAudioPacket(std::unique_ptr<AudioPacket> packet,
                           const base::RepeatingClosure& done) override;

-  base::WeakPtr<AudioPlaybackStream> GetWeakPtr();
-
  private:
   class Core;
@@ -48,7 +46,6 @@ class AudioPlaybackStream : public protocol::AudioStub {
   scoped_refptr<base::SingleThreadTaskRunner> audio_task_runner_;
-  base::WeakPtrFactory<AudioPlaybackStream> weak_factory_;

   DISALLOW_COPY_AND_ASSIGN(AudioPlaybackStream);
 };
...
@@ -93,7 +93,9 @@ struct ChromotingSession::SessionContext {
   ChromotingClientRuntime* runtime;
   base::WeakPtr<ChromotingSession::Delegate> delegate;
   base::WeakPtr<ClientTelemetryLogger> logger;
-  base::WeakPtr<protocol::AudioStub> audio_player;
+  std::unique_ptr<protocol::AudioStub> audio_player;
+  std::unique_ptr<base::WeakPtrFactory<protocol::AudioStub>>
+      audio_player_weak_factory;
   std::unique_ptr<protocol::CursorShapeStub> cursor_shape_stub;
   std::unique_ptr<protocol::VideoRenderer> video_renderer;
@@ -435,9 +437,11 @@ void ChromotingSession::Core::ConnectOnNetworkThread() {
       ChromotingEvent::ParseOsFromString(session_context_->info.host_os),
       session_context_->info.host_os_version);

-  client_.reset(new ChromotingClient(client_context_.get(), this,
-                                     session_context_->video_renderer.get(),
-                                     session_context_->audio_player));
+  // TODO(yuweih): Ideally we should make ChromotingClient and all its
+  // sub-components (e.g. ConnectionToHost) take raw pointer instead of WeakPtr.
+  client_.reset(new ChromotingClient(
+      client_context_.get(), this, session_context_->video_renderer.get(),
+      session_context_->audio_player_weak_factory->GetWeakPtr()));

   XmppSignalStrategy::XmppServerConfig xmpp_config;
   xmpp_config.host = kXmppServer;
@@ -582,7 +586,7 @@ ChromotingSession::ChromotingSession(
     base::WeakPtr<ChromotingSession::Delegate> delegate,
     std::unique_ptr<protocol::CursorShapeStub> cursor_shape_stub,
     std::unique_ptr<protocol::VideoRenderer> video_renderer,
-    base::WeakPtr<protocol::AudioStub> audio_player,
+    std::unique_ptr<protocol::AudioStub> audio_player,
     const ConnectToHostInfo& info) {
   DCHECK(delegate);
   DCHECK(cursor_shape_stub);
@@ -601,7 +605,10 @@ ChromotingSession::ChromotingSession(
   session_context_->runtime = runtime_;
   session_context_->delegate = delegate;
   session_context_->logger = logger_->GetWeakPtr();
-  session_context_->audio_player = audio_player;
+  session_context_->audio_player = std::move(audio_player);
+  session_context_->audio_player_weak_factory =
+      std::make_unique<base::WeakPtrFactory<protocol::AudioStub>>(
+          session_context_->audio_player.get());
   session_context_->cursor_shape_stub = std::move(cursor_shape_stub);
   session_context_->video_renderer = std::move(video_renderer);
   session_context_->info = info;
...
@@ -90,7 +90,7 @@ class ChromotingSession : public ClientInputInjector {
   ChromotingSession(base::WeakPtr<ChromotingSession::Delegate> delegate,
                     std::unique_ptr<protocol::CursorShapeStub> cursor_stub,
                     std::unique_ptr<protocol::VideoRenderer> video_renderer,
-                    base::WeakPtr<protocol::AudioStub> audio_player,
+                    std::unique_ptr<protocol::AudioStub> audio_player,
                     const ConnectToHostInfo& info);
   ~ChromotingSession() override;
...
@@ -41,28 +41,21 @@ JniClient::~JniClient() {
 void JniClient::ConnectToHost(const ConnectToHostInfo& info) {
   DCHECK(runtime_->ui_task_runner()->BelongsToCurrentThread());
   DCHECK(!display_handler_);
-  DCHECK(!audio_player_);
   DCHECK(!session_);

   host_id_ = info.host_id;
   display_handler_.reset(new JniGlDisplayHandler(java_client_));
-  audio_player_.reset(new AudioPlayerAndroid());
   session_.reset(new ChromotingSession(
       weak_ptr_, display_handler_->CreateCursorShapeStub(),
-      display_handler_->CreateVideoRenderer(), audio_player_->GetWeakPtr(),
-      info));
+      display_handler_->CreateVideoRenderer(),
+      std::make_unique<AudioPlayerAndroid>(), info));
   session_->Connect();
 }

 void JniClient::DisconnectFromHost() {
   DCHECK(runtime_->ui_task_runner()->BelongsToCurrentThread());
   session_.reset();
-  if (audio_player_) {
-    runtime_->network_task_runner()->DeleteSoon(FROM_HERE,
-                                                audio_player_.release());
-  }
   display_handler_.reset();
 }
...
...@@ -16,7 +16,6 @@ ...@@ -16,7 +16,6 @@
namespace remoting { namespace remoting {
class AudioPlayerAndroid;
class ChromotingClientRuntime; class ChromotingClientRuntime;
class JniGlDisplayHandler; class JniGlDisplayHandler;
...@@ -161,7 +160,6 @@ class JniClient : public ChromotingSession::Delegate { ...@@ -161,7 +160,6 @@ class JniClient : public ChromotingSession::Delegate {
base::android::ScopedJavaGlobalRef<jobject> java_client_; base::android::ScopedJavaGlobalRef<jobject> java_client_;
std::unique_ptr<JniGlDisplayHandler> display_handler_; std::unique_ptr<JniGlDisplayHandler> display_handler_;
std::unique_ptr<AudioPlayerAndroid> audio_player_;
std::string host_id_; std::string host_id_;
......
@@ -68,7 +68,6 @@ static void ResolveFeedbackDataCallback(
   remoting::GestureInterpreter _gestureInterpreter;
   remoting::KeyboardInterpreter _keyboardInterpreter;
   std::unique_ptr<remoting::RendererProxy> _renderer;
-  std::unique_ptr<remoting::AudioPlaybackStream> _audioStream;

   // _session is valid only when the session is connected.
   std::unique_ptr<remoting::ChromotingSession> _session;
@@ -135,7 +134,7 @@ static void ResolveFeedbackDataCallback(
         showMessage:[MDCSnackbarMessage messageWithText:@"Using WebRTC"]];
   }

-  _audioStream = std::make_unique<remoting::AudioPlaybackStream>(
+  auto audioStream = std::make_unique<remoting::AudioPlaybackStream>(
       std::make_unique<remoting::AudioPlaybackSinkIos>(),
       _runtime->audio_task_runner());
@@ -144,7 +143,7 @@ static void ResolveFeedbackDataCallback(
   _session.reset(new remoting::ChromotingSession(
       _sessonDelegate->GetWeakPtr(), [_displayHandler CreateCursorShapeStub],
-      [_displayHandler CreateVideoRenderer], _audioStream->GetWeakPtr(), info));
+      [_displayHandler CreateVideoRenderer], std::move(audioStream), info));

   _renderer = [_displayHandler CreateRendererProxy];
   _gestureInterpreter.SetContext(_renderer.get(), _session.get());
   _keyboardInterpreter.SetContext(_session.get());
@@ -157,10 +156,6 @@ static void ResolveFeedbackDataCallback(
   _displayHandler = nil;

-  if (_audioStream) {
-    _runtime->network_task_runner()->DeleteSoon(FROM_HERE,
-                                                _audioStream.release());
-  }
   // This needs to be deleted on the display thread since GlDisplayHandler binds
   // its WeakPtrFactory to the display thread.
   // TODO(yuweih): Ideally this constraint can be removed once we allow
...