Commit b922b3a9 authored by Thoren Paulson, committed by Commit Bot

Support running cast apps in background.

This CL should allow Cast apps to run in the background on Android TV
while playing nicely with other apps. This includes:

* Disable kRequestSystemAudioFocus on Cast apps so Cast can manage audio
  focus.
* Stop cast apps when audio focus is lost.
* Don't tear down/finish CastWebContentsActivity when the user leaves.

Bug: internal b/143313649, internal b/142828228
Test: manual
Merge-With: eureka-internal/359240
Change-Id: I2ac25f7e0749abfdac2bb2ab98010f7489787f25
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2036519
Auto-Submit: Thoren Paulson <thoren@chromium.org>
Reviewed-by: Simeon Anfinrud <sanfin@chromium.org>
Reviewed-by: Luke Halliwell (slow) <halliwell@chromium.org>
Reviewed-by: Becca Hughes <beccahughes@chromium.org>
Commit-Queue: Luke Halliwell (slow) <halliwell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#738974}
parent 2a24897a
......@@ -33,7 +33,7 @@
android:screenOrientation="landscape"
android:taskAffinity=".CastWebContentsActivity"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|locale|layoutDirection"
android:excludeFromRecents="true"
android:excludeFromRecents="false"
{% if cast_build_enable_background_activities == "true" %}
android:noHistory="false">
{% else %}
......
......@@ -64,6 +64,11 @@ public class CastAudioFocusRequest {
// Registers the listener to be notified of audio focus changes for this
// request, and stores it for use when the platform request is (re)built.
void setAudioFocusChangeListener(AudioManager.OnAudioFocusChangeListener l) {
mAudioFocusChangeListener = l;
// AudioFocusRequest only exists on API 26 (O) and above. If a platform
// request was already built, rebuild it from the existing one so the new
// listener is attached; Builder(AudioFocusRequest) copies the other fields
// (focus gain, attributes) from the prior request per the Android API.
// NOTE(review): if focus was already granted, the rebuilt request is
// presumably re-submitted by the caller — confirm against request().
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && mAudioFocusRequest != null) {
mAudioFocusRequest = new AudioFocusRequest.Builder(mAudioFocusRequest)
.setOnAudioFocusChangeListener(mAudioFocusChangeListener)
.build();
}
}
int request(AudioManager audioManager) {
......
......@@ -65,7 +65,7 @@ public class CastAudioManager {
public Observable<AudioFocusLoss> requestAudioFocusWhen(
Observable<CastAudioFocusRequest> event) {
Controller<AudioFocusLoss> audioFocusLossState = new Controller<>();
audioFocusLossState.set(AudioFocusLoss.NORMAL);
audioFocusLossState.set(AudioFocusLoss.NOT_REQUESTED);
event.subscribe(focusRequest -> {
focusRequest.setAudioFocusChangeListener((int focusChange) -> {
audioFocusLossState.set(AudioFocusLoss.from(focusChange));
......@@ -139,7 +139,8 @@ public class CastAudioManager {
public enum AudioFocusLoss {
NORMAL,
TRANSIENT,
TRANSIENT_CAN_DUCK;
TRANSIENT_CAN_DUCK,
NOT_REQUESTED;
private static @Nullable AudioFocusLoss from(int focusChange) {
switch (focusChange) {
......
......@@ -48,8 +48,6 @@ public class CastWebContentsActivity extends Activity {
private final Controller<Unit> mResumedState = new Controller<>();
// Tracks whether this Activity is between onStart() and onStop().
private final Controller<Unit> mStartedState = new Controller<>();
// Tracks whether the user has left according to onUserLeaveHint().
private final Controller<Unit> mUserLeftState = new Controller<>();
// Tracks the most recent Intent for the Activity.
private final Controller<Intent> mGotIntentState = new Controller<>();
// Set this to cause the Activity to finish.
......@@ -128,6 +126,17 @@ public class CastWebContentsActivity extends Activity {
audioManager.releaseStreamMuteIfNecessary(AudioManager.STREAM_MUSIC);
}));
final Observable<CastAudioFocusRequest> audioFocusRequestState = mCreatedState.map(x
-> new CastAudioFocusRequest.Builder()
.setFocusGain(AudioManager.AUDIOFOCUS_GAIN)
.build());
mAudioManagerState.subscribe((CastAudioManager audioManager) -> {
return audioManager.requestAudioFocusWhen(audioFocusRequestState)
.filter(state -> state == CastAudioManager.AudioFocusLoss.NORMAL)
.subscribe(Observers.onEnter(x -> mIsFinishingState.set("Lost audio focus.")));
});
// Handle each new Intent.
Controller<CastWebContentsSurfaceHelper.StartParams> startParamsState = new Controller<>();
mGotIntentState.and(Observable.not(mIsFinishingState))
......@@ -154,11 +163,6 @@ public class CastWebContentsActivity extends Activity {
intent.setFlags(flags);
startActivity(intent);
}));
Observable<?> stoppingBecauseUserLeftState =
Observable.not(mStartedState).and(mUserLeftState);
stoppingBecauseUserLeftState.subscribe(
Observers.onEnter(x -> mIsFinishingState.set("User left and activity stopped.")));
}
@Override
......@@ -215,12 +219,6 @@ public class CastWebContentsActivity extends Activity {
super.onDestroy();
}
@Override
// Activity callback invoked when the user chooses to leave (e.g. presses
// Home). Records the event in the mUserLeftState controller, which (per its
// declaration) tracks whether the user has left; observers combine it with
// the stopped state to decide whether to finish the Activity.
protected void onUserLeaveHint() {
mUserLeftState.set(Unit.unit());
super.onUserLeaveHint();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
if (DEBUG) Log.d(TAG, "onWindowFocusChanged(%b)", hasFocus);
......
......@@ -115,6 +115,7 @@ class CastWebContentsSurfaceHelper {
Controller<WebContents> webContentsState = new Controller<>();
mStartParamsState.map(params -> params.webContents)
.subscribe(Observers.onEnter(webContentsState::set));
mCreatedState.subscribe(Observers.onExit(x -> webContentsState.reset()));
// Receive broadcasts indicating the screen turned off while we have active WebContents.
uriState.subscribe((Uri uri) -> {
......
......@@ -51,9 +51,9 @@ public class CastAudioManagerTest {
Observable<CastAudioManager.AudioFocusLoss> lostAudioFocusState =
audioManager.requestAudioFocusWhen(requestAudioFocusState);
ReactiveRecorder lostAudioFocusRecorder = ReactiveRecorder.record(lostAudioFocusState);
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
requestAudioFocusState.set(buildFocusRequest());
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
}
@Test
......@@ -66,11 +66,11 @@ public class CastAudioManagerTest {
Observable<CastAudioManager.AudioFocusLoss> lostAudioFocusState =
audioManager.requestAudioFocusWhen(requestAudioFocusState);
ReactiveRecorder lostAudioFocusRecorder = ReactiveRecorder.record(lostAudioFocusState);
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
requestAudioFocusState.set(buildFocusRequest());
shadowAudioManager.getLastAudioFocusRequest().listener.onAudioFocusChange(
AudioManager.AUDIOFOCUS_GAIN);
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
requestAudioFocusState.reset();
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
}
......@@ -85,12 +85,12 @@ public class CastAudioManagerTest {
Observable<CastAudioManager.AudioFocusLoss> lostAudioFocusState =
audioManager.requestAudioFocusWhen(requestAudioFocusState);
ReactiveRecorder lostAudioFocusRecorder = ReactiveRecorder.record(lostAudioFocusState);
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
requestAudioFocusState.set(buildFocusRequest());
AudioManager.OnAudioFocusChangeListener listener =
shadowAudioManager.getLastAudioFocusRequest().listener;
listener.onAudioFocusChange(AudioManager.AUDIOFOCUS_GAIN);
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
listener.onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS);
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
}
......@@ -105,12 +105,12 @@ public class CastAudioManagerTest {
Observable<CastAudioManager.AudioFocusLoss> lostAudioFocusState =
audioManager.requestAudioFocusWhen(requestAudioFocusState);
ReactiveRecorder lostAudioFocusRecorder = ReactiveRecorder.record(lostAudioFocusState);
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
requestAudioFocusState.set(buildFocusRequest());
AudioManager.OnAudioFocusChangeListener listener =
shadowAudioManager.getLastAudioFocusRequest().listener;
listener.onAudioFocusChange(AudioManager.AUDIOFOCUS_GAIN);
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().closed(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
listener.onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS);
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
listener.onAudioFocusChange(AudioManager.AUDIOFOCUS_GAIN);
......@@ -126,7 +126,7 @@ public class CastAudioManagerTest {
Observable<CastAudioManager.AudioFocusLoss> lostAudioFocusState =
audioManager.requestAudioFocusWhen(requestAudioFocusState);
ReactiveRecorder lostAudioFocusRecorder = ReactiveRecorder.record(lostAudioFocusState);
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NORMAL).end();
lostAudioFocusRecorder.verify().opened(CastAudioManager.AudioFocusLoss.NOT_REQUESTED).end();
}
@Test
......
......@@ -11,6 +11,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.app.Activity;
import android.content.Intent;
......@@ -35,6 +36,7 @@ import org.robolectric.annotation.Implements;
import org.robolectric.shadow.api.Shadow;
import org.robolectric.shadows.ShadowActivity;
import org.chromium.chromecast.base.Observable;
import org.chromium.content_public.browser.WebContents;
import org.chromium.testing.local.LocalRobolectricTestRunner;
......@@ -109,6 +111,8 @@ public class CastWebContentsActivityTest {
@Test
public void testReleasesStreamMuteIfNecessaryOnPause() {
CastAudioManager mockAudioManager = mock(CastAudioManager.class);
when(mockAudioManager.requestAudioFocusWhen(anyObject()))
.thenReturn(mock(Observable.class));
mActivity.setAudioManagerForTesting(mockAudioManager);
mActivityLifecycle.create().start().resume();
mActivityLifecycle.pause();
......@@ -239,22 +243,6 @@ public class CastWebContentsActivityTest {
Assert.assertFalse(mShadowActivity.isFinishing());
}
@Test
// Verifies that the Activity finishes itself when the user leaves
// (userLeaving() fires onUserLeaveHint) and the Activity is then stopped.
public void testUserLeaveAndStopCausesFinish() {
// Drive the Activity to the resumed state first so leave/stop is a real
// foreground-to-background transition.
mActivityLifecycle.create().start().resume();
mActivityLifecycle.pause().userLeaving().stop();
Assert.assertTrue(mShadowActivity.isFinishing());
}
@Test
// Verifies that the surface helper is torn down (onDestroy called) when the
// user leaves and the Activity stops, so WebContents resources are released.
public void testUserLeaveAndStopDestroysSurfaceHelper() {
// Inject a mock so the test can observe the onDestroy() call without a
// real CastWebContentsSurfaceHelper.
CastWebContentsSurfaceHelper surfaceHelper = mock(CastWebContentsSurfaceHelper.class);
mActivity.setSurfaceHelperForTesting(surfaceHelper);
mActivityLifecycle.create().start().resume();
mActivityLifecycle.pause().userLeaving().stop();
verify(surfaceHelper).onDestroy();
}
@Test
public void testOnDestroyDestroysSurfaceHelper() {
CastWebContentsSurfaceHelper surfaceHelper = mock(CastWebContentsSurfaceHelper.class);
......
......@@ -333,6 +333,6 @@ public class CastWebContentsSurfaceHelperTest {
when(mWebContentsView.open(webContents)).thenReturn(scope);
mSurfaceHelper.onNewStartParams(params);
mSurfaceHelper.onDestroy();
verify(scope, never()).close();
verify(scope).close();
}
}
......@@ -161,6 +161,7 @@ CastContentBrowserClient::CastContentBrowserClient(
#if defined(OS_ANDROID)
cast_feature_list_creator_->SetExtraDisableFeatures({
::media::kAudioFocusLossSuspendMediaSession,
::media::kRequestSystemAudioFocus,
});
#endif
}
......
......@@ -35,6 +35,9 @@ void AudioFocusDelegateAndroid::Initialize() {
AudioFocusDelegate::AudioFocusResult
AudioFocusDelegateAndroid::RequestAudioFocus(
media_session::mojom::AudioFocusType audio_focus_type) {
if (!base::FeatureList::IsEnabled(media::kRequestSystemAudioFocus))
return AudioFocusDelegate::AudioFocusResult::kSuccess;
JNIEnv* env = base::android::AttachCurrentThread();
DCHECK(env);
bool success = Java_AudioFocusDelegate_requestAudioFocus(
......
......@@ -524,6 +524,11 @@ const base::Feature kCanPlayHls{"CanPlayHls", base::FEATURE_ENABLED_BY_DEFAULT};
// HLS manifests will fail to load (triggering source fallback or load error).
const base::Feature kHlsPlayer{"HlsPlayer", base::FEATURE_ENABLED_BY_DEFAULT};
// When enabled, Playing media sessions will request audio focus from the
// Android system.
const base::Feature kRequestSystemAudioFocus{"RequestSystemAudioFocus",
base::FEATURE_ENABLED_BY_DEFAULT};
// Use the (hacky) AudioManager.getOutputLatency() call to get the estimated
// hardware latency for a stream for OpenSLES playback. This is normally not
// needed, except for some Android TV devices.
......
......@@ -179,6 +179,7 @@ MEDIA_EXPORT extern const base::Feature kDisableSurfaceLayerForVideo;
MEDIA_EXPORT extern const base::Feature kCanPlayHls;
MEDIA_EXPORT extern const base::Feature kPictureInPictureAPI;
MEDIA_EXPORT extern const base::Feature kHlsPlayer;
MEDIA_EXPORT extern const base::Feature kRequestSystemAudioFocus;
MEDIA_EXPORT extern const base::Feature kUseAudioLatencyFromHAL;
MEDIA_EXPORT extern const base::Feature kUsePooledSharedImageVideoProvider;
#endif // defined(OS_ANDROID)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment