Upstream: Media implementation for Android.

- This CL makes media_unittests linked.
- The audio_track_output_stub_android.cc is in place of
  audio_track_output_android.cc because the Java environment is not
  avaliable yet.
- Also changed to dependence on '../third_party/ffmpeg/ffmpeg.gyp'
  only if os is not Android in media.gyp.

BUG=
TEST=


Review URL: http://codereview.chromium.org/8718014

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@112739 0039d316-1c4b-4281-b951-d872f2087c98
parent 4297bea8
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/audio/android/audio_manager_android.h"
#include "base/logging.h"
#include "media/audio/android/audio_track_output_android.h"
#include "media/audio/audio_manager.h"
#include "media/audio/fake_audio_input_stream.h"
#include "media/audio/fake_audio_output_stream.h"
// static
AudioManager* AudioManager::CreateAudioManager() {
return new AudioManagerAndroid();
}
AudioManagerAndroid::AudioManagerAndroid() {}
AudioManagerAndroid::~AudioManagerAndroid() {
audio_thread_.Stop();
}
bool AudioManagerAndroid::HasAudioOutputDevices() {
return true;
}
bool AudioManagerAndroid::HasAudioInputDevices() {
return false;
}
AudioOutputStream* AudioManagerAndroid::MakeAudioOutputStream(
const AudioParameters& params) {
if (!params.IsValid())
return NULL;
if (params.format == AudioParameters::AUDIO_MOCK)
return FakeAudioOutputStream::MakeFakeStream(params);
if (params.format == AudioParameters::AUDIO_PCM_LINEAR ||
params.format == AudioParameters::AUDIO_PCM_LOW_LATENCY)
return AudioTrackOutputStream::MakeStream(params);
return NULL;
}
AudioInputStream* AudioManagerAndroid::MakeAudioInputStream(
const AudioParameters& params, const std::string& device_id) {
return FakeAudioInputStream::MakeFakeStream(params);
}
void AudioManagerAndroid::MuteAll() {
NOTIMPLEMENTED();
}
void AudioManagerAndroid::UnMuteAll() {
NOTIMPLEMENTED();
}
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_AUDIO_ANDROID_AUDIO_MANAGER_ANDROID_H_
#define MEDIA_AUDIO_ANDROID_AUDIO_MANAGER_ANDROID_H_
#include "media/audio/audio_manager_base.h"
// Android implemention of AudioManager.
class AudioManagerAndroid : public AudioManagerBase {
public:
AudioManagerAndroid();
// Implementation of AudioManager.
virtual bool HasAudioOutputDevices() OVERRIDE;
virtual bool HasAudioInputDevices() OVERRIDE;
virtual AudioOutputStream* MakeAudioOutputStream(
const AudioParameters& params) OVERRIDE;
virtual AudioInputStream* MakeAudioInputStream(
const AudioParameters& params, const std::string& device_id) OVERRIDE;
virtual void MuteAll() OVERRIDE;
virtual void UnMuteAll() OVERRIDE;
protected:
virtual ~AudioManagerAndroid();
private:
DISALLOW_COPY_AND_ASSIGN(AudioManagerAndroid);
};
#endif // MEDIA_AUDIO_ANDROID_AUDIO_MANAGER_ANDROID_H_
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/audio/android/audio_track_output_android.h"
#include "base/android/jni_android.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/time.h"
using base::android::AttachCurrentThread;
using base::android::CheckException;
static const int kTimerIntervalInMilliseconds = 50;
class AudioTrackOutputStream::StreamBuffer {
public:
explicit StreamBuffer(uint32 buffer_size);
uint32 ReadStream(uint8* dest, uint32 max_size);
void ResetBuffer(uint32 data_size);
uint8* GetWritableBuffer();
const uint8* ReadBuffer();
void AdvancePosition(uint32 advance);
uint32 buffer_size() { return buffer_size_; }
uint32 data_len() { return data_size_ - current_; }
private:
scoped_array<uint8> buffer_;
uint32 buffer_size_;
uint32 data_size_;
uint32 current_;
DISALLOW_COPY_AND_ASSIGN(StreamBuffer);
};
AudioTrackOutputStream::StreamBuffer::StreamBuffer(uint32 buffer_size)
: buffer_(new uint8[buffer_size]),
buffer_size_(buffer_size),
data_size_(0),
current_(0) {
}
uint32 AudioTrackOutputStream::StreamBuffer::ReadStream(uint8* dest,
uint32 max_size) {
uint32 copy_size = data_len() < max_size ? data_len() : max_size;
memcpy(dest, buffer_.get() + current_, copy_size);
current_ += copy_size;
return copy_size;
}
void AudioTrackOutputStream::StreamBuffer::ResetBuffer(uint32 data_size) {
CHECK_LE(data_size, buffer_size_);
data_size_ = data_size;
current_ = 0;
}
uint8* AudioTrackOutputStream::StreamBuffer::GetWritableBuffer() {
return buffer_.get();
}
const uint8* AudioTrackOutputStream::StreamBuffer::ReadBuffer() {
return buffer_.get() + current_;
}
void AudioTrackOutputStream::StreamBuffer::AdvancePosition(uint32 advance) {
current_ += advance;
CHECK(current_ <= data_size_);
}
AudioTrackOutputStream::AudioTrackOutputStream(const AudioParameters& params)
: source_callback_(NULL),
params_(params.format,
params.sample_rate,
params.bits_per_sample,
params.samples_per_packet,
params.channels),
status_(IDLE),
volume_(0),
buffer_size_(0),
j_class_(NULL),
j_audio_track_(NULL) {
data_buffer_.reset(
new AudioTrackOutputStream::StreamBuffer(params.GetPacketSize()));
}
AudioTrackOutputStream::~AudioTrackOutputStream() {
Close();
if (j_class_ && j_audio_track_) {
JNIEnv* env = AttachCurrentThread();
CHECK(env);
env->DeleteGlobalRef(j_audio_track_);
j_audio_track_ = NULL;
env->DeleteGlobalRef(j_class_);
j_class_ = NULL;
}
}
bool AudioTrackOutputStream::Open() {
if (!params_.IsValid())
return false;
if (status_ == OPENED)
return true;
else if (status_ != IDLE)
return false;
JNIEnv* env = AttachCurrentThread();
CHECK(env);
jclass cls = env->FindClass("android/media/AudioTrack");
CHECK(cls);
j_class_ = static_cast<jclass>(env->NewGlobalRef(cls));
env->DeleteLocalRef(cls);
jint channels;
if (params_.channels == 1)
channels = GetStaticIntField("AudioFormat", "CHANNEL_OUT_MONO");
else if (params_.channels == 2)
channels = GetStaticIntField("AudioFormat", "CHANNEL_OUT_STEREO");
else if (params_.channels == 4)
channels = GetStaticIntField("AudioFormat", "CHANNEL_OUT_QUAD");
else
return false;
jint bits_per_sample;
if (params_.bits_per_sample == 16)
bits_per_sample = GetStaticIntField("AudioFormat", "ENCODING_PCM_16BIT");
else if (params_.bits_per_sample == 8)
bits_per_sample = GetStaticIntField("AudioFormat", "ENCODING_PCM_8BIT");
else
return false;
jmethodID min_method = env->GetStaticMethodID(j_class_, "getMinBufferSize",
"(III)I");
CHECK(min_method);
int min_buffer_size = env->CallStaticIntMethod(
j_class_, min_method, static_cast<jint>(params_.sample_rate),
channels, bits_per_sample);
if (params_.GetPacketSize() < min_buffer_size)
return false;
buffer_size_ = params_.GetPacketSize();
jmethodID constructor = env->GetMethodID(j_class_, "<init>", "(IIIIII)V");
CHECK(constructor);
jobject tmp = env->NewObject(
j_class_, constructor,
GetStaticIntField("AudioManager", "STREAM_MUSIC"),
static_cast<jint>(params_.sample_rate), channels, bits_per_sample,
static_cast<jint>(buffer_size_),
GetStaticIntField("AudioTrack", "MODE_STREAM"));
CHECK(tmp);
j_audio_track_ = env->NewGlobalRef(tmp);
env->DeleteLocalRef(tmp);
status_ = OPENED;
return true;
}
void AudioTrackOutputStream::Close() {
if (!j_audio_track_)
return;
Stop();
CallVoidMethod("flush");
status_ = INVALID;
}
void AudioTrackOutputStream::Start(AudioSourceCallback* callback) {
if (status_ != OPENED)
return;
if (!j_audio_track_)
return;
source_callback_ = callback;
data_buffer_->ResetBuffer(0);
FillAudioBufferTask();
CallVoidMethod("play");
status_ = PLAYING;
timer_.Start(
FROM_HERE,
base::TimeDelta::FromMilliseconds(kTimerIntervalInMilliseconds),
this, &AudioTrackOutputStream::FillAudioBufferTask);
}
void AudioTrackOutputStream::Stop() {
if (!j_audio_track_)
return;
if (status_ == PLAYING) {
timer_.Stop();
CallVoidMethod("stop");
status_ = OPENED;
}
}
void AudioTrackOutputStream::SetVolume(double volume) {
volume_ = volume;
if (!j_audio_track_)
return;
JNIEnv* env = AttachCurrentThread();
CHECK(env);
jmethodID method = env->GetMethodID(j_class_, "setStereoVolume", "(FF)I");
CHECK(method);
env->CallIntMethod(j_audio_track_, method, static_cast<jfloat>(volume),
static_cast<jfloat>(volume));
CheckException(env);
}
void AudioTrackOutputStream::GetVolume(double* volume) {
if (volume)
*volume = volume_;
}
// static
AudioOutputStream* AudioTrackOutputStream::MakeStream(
const AudioParameters& params) {
if (params.IsValid())
return new AudioTrackOutputStream(params);
return NULL;
}
void AudioTrackOutputStream::CallVoidMethod(std::string method_name) {
JNIEnv* env = AttachCurrentThread();
CHECK(env);
jmethodID method = env->GetMethodID(j_class_, method_name.c_str(), "()V");
CHECK(method);
env->CallVoidMethod(j_audio_track_, method);
CheckException(env);
}
jint AudioTrackOutputStream::GetStaticIntField(std::string class_name,
std::string field_name) {
JNIEnv* env = AttachCurrentThread();
CHECK(env);
class_name.insert(0, "android/media/");
jclass cls = env->FindClass(class_name.c_str());
CHECK(cls);
jfieldID field = env->GetStaticFieldID(cls, field_name.c_str(), "I");
CHECK(field);
jint int_field = env->GetStaticIntField(cls, field);
env->DeleteLocalRef(cls);
return int_field;
}
void AudioTrackOutputStream::FillAudioBufferTask() {
if (status_ != PLAYING)
return;
JNIEnv* env = AttachCurrentThread();
CHECK(env);
jmethodID method = env->GetMethodID(j_class_, "getPlaybackHeadPosition",
"()I");
CHECK(method);
int64 position = env->CallIntMethod(j_audio_track_, method);
CheckException(env);
// Calculate how many bytes we can fill in.
position *= params_.sample_rate * params_.bits_per_sample *
params_.channels / 8;
position %= buffer_size_;
int need_buffer = static_cast<int>(buffer_size_ - position);
CHECK(need_buffer >= 0 && need_buffer <= buffer_size_);
if (!need_buffer)
return;
// Fill the internal buffer first.
if (!data_buffer_->data_len()) {
uint32 src_data_size = source_callback_->OnMoreData(
this,
data_buffer_->GetWritableBuffer(),
data_buffer_->buffer_size(),
AudioBuffersState());
data_buffer_->ResetBuffer(src_data_size);
}
need_buffer = std::min(need_buffer,
static_cast<int>(data_buffer_->data_len()));
// Prepare a Java array that contains the samples.
jbyteArray buf = env->NewByteArray(need_buffer);
env->SetByteArrayRegion(
buf, 0, need_buffer,
reinterpret_cast<const jbyte*>(data_buffer_->ReadBuffer()));
data_buffer_->AdvancePosition(need_buffer);
// Invoke method to submit samples.
method = env->GetMethodID(j_class_, "write", "([BII)I");
env->CallIntMethod(j_audio_track_, method, buf, static_cast<jint>(0),
static_cast<jint>(need_buffer));
CheckException(env);
env->DeleteLocalRef(buf);
}
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_AUDIO_AUDIO_TRACK_OUTPUT_ANDROID_H_
#define MEDIA_AUDIO_AUDIO_TRACK_OUTPUT_ANDROID_H_
#include <jni.h>
#include <string>
#include "base/basictypes.h"
#include "base/memory/scoped_ptr.h"
#include "base/timer.h"
#include "media/audio/audio_io.h"
#include "media/audio/audio_parameters.h"
class AudioManagerAndroid;
// Implements PCM audio output support for Android using the AudioTrack API.
class AudioTrackOutputStream : public AudioOutputStream {
public:
enum Status {
IDLE,
OPENED,
PLAYING,
INVALID
};
explicit AudioTrackOutputStream(const AudioParameters& params);
virtual ~AudioTrackOutputStream();
// Implementation of AudioOutputStream.
virtual bool Open() OVERRIDE;
virtual void Close() OVERRIDE;
virtual void Start(AudioSourceCallback* callback) OVERRIDE;
virtual void Stop() OVERRIDE;
virtual void SetVolume(double volume) OVERRIDE;
virtual void GetVolume(double* volume) OVERRIDE;
static AudioOutputStream* MakeStream(const AudioParameters& params);
private:
// Helper methods to invoke Java methods on |j_audio_track_|.
void CallVoidMethod(std::string method_name);
// Get the value of static field.
jint GetStaticIntField(std::string class_name, std::string field_name);
// Feed more data to AudioTrack.
void FillAudioBufferTask();
AudioSourceCallback* source_callback_;
AudioParameters params_;
class StreamBuffer;
scoped_ptr<StreamBuffer> data_buffer_;
Status status_;
double volume_;
int buffer_size_;
// Java AudioTrack class and instance.
jclass j_class_;
jobject j_audio_track_;
base::RepeatingTimer<AudioTrackOutputStream> timer_;
DISALLOW_COPY_AND_ASSIGN(AudioTrackOutputStream);
};
#endif // MEDIA_AUDIO_AUDIO_TRACK_OUTPUT_ANDROID_H_
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/audio/android/audio_track_output_android.h"
// This file was added because there is no Java environment in
// upstream yet, audio_track_output_android.cc should be used in
// downstream.
// TODO(michaelbai): Remove this file once Java environment ready.
// static
AudioOutputStream* AudioTrackOutputStream::MakeStream(
const AudioParameters& params) {
return NULL;
}
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/base/media.h"
#include "base/logging.h"
namespace media {
bool InitializeMediaLibrary(const FilePath& module_dir) {
// Android doesn't require any additional media libraries.
return true;
}
void InitializeMediaLibraryForTesting() {}
bool IsMediaLibraryInitialized() {
return true;
}
bool InitializeOpenMaxLibrary(const FilePath& module_dir) {
NOTIMPLEMENTED();
return false;
}
} // namespace media
...@@ -17,7 +17,6 @@ ...@@ -17,7 +17,6 @@
'../base/base.gyp:base', '../base/base.gyp:base',
'../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../build/temp_gyp/googleurl.gyp:googleurl', '../build/temp_gyp/googleurl.gyp:googleurl',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/openmax/openmax.gyp:il', '../third_party/openmax/openmax.gyp:il',
'../ui/ui.gyp:ui', '../ui/ui.gyp:ui',
], ],
...@@ -49,6 +48,10 @@ ...@@ -49,6 +48,10 @@
'audio/audio_parameters.h', 'audio/audio_parameters.h',
'audio/audio_util.cc', 'audio/audio_util.cc',
'audio/audio_util.h', 'audio/audio_util.h',
'audio/android/audio_manager_android.cc',
'audio/android/audio_manager_android.h',
'audio/android/audio_track_output_android.cc',
'audio/android/audio_track_output_android.h',
'audio/fake_audio_input_stream.cc', 'audio/fake_audio_input_stream.cc',
'audio/fake_audio_input_stream.h', 'audio/fake_audio_input_stream.h',
'audio/fake_audio_output_stream.cc', 'audio/fake_audio_output_stream.cc',
...@@ -130,6 +133,7 @@ ...@@ -130,6 +133,7 @@
'base/h264_bitstream_converter.cc', 'base/h264_bitstream_converter.cc',
'base/h264_bitstream_converter.h', 'base/h264_bitstream_converter.h',
'base/media.h', 'base/media.h',
'base/media_android.cc',
'base/media_export.h', 'base/media_export.h',
'base/media_log.cc', 'base/media_log.cc',
'base/media_log.h', 'base/media_log.h',
...@@ -262,10 +266,12 @@ ...@@ -262,10 +266,12 @@
'conditions': [ 'conditions': [
# Android doesn't use ffmpeg, so make the dependency conditional # Android doesn't use ffmpeg, so make the dependency conditional
# and exclude the sources which depend on ffmpeg. # and exclude the sources which depend on ffmpeg.
['OS=="android"', { ['OS != "android"', {
'dependencies!': [ 'dependencies': [
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg', '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
], ],
}],
['OS == "android"', {
'sources!': [ 'sources!': [
'base/media_posix.cc', 'base/media_posix.cc',
'ffmpeg/ffmpeg_common.cc', 'ffmpeg/ffmpeg_common.cc',
...@@ -297,6 +303,17 @@ ...@@ -297,6 +303,17 @@
'video/ffmpeg_video_decode_engine.h', 'video/ffmpeg_video_decode_engine.h',
], ],
}], }],
# The below 'android' condition were added temporarily and should be
# removed in downstream, because there is no Java environment setup in
# upstream yet.
['OS == "android"', {
'sources!':[
'audio/android/audio_track_output_android.cc',
],
'sources':[
'audio/android/audio_track_output_stub_android.cc',
],
}],
['OS=="linux" or OS=="freebsd" or OS=="solaris"', { ['OS=="linux" or OS=="freebsd" or OS=="solaris"', {
'link_settings': { 'link_settings': {
'libraries': [ 'libraries': [
...@@ -339,6 +356,9 @@ ...@@ -339,6 +356,9 @@
], ],
}], }],
], ],
}],
['os_posix == 1 and OS != "android"', {
# Video capture isn't supported in Android yet.
'sources!': [ 'sources!': [
'video/capture/video_capture_device_dummy.cc', 'video/capture/video_capture_device_dummy.cc',
'video/capture/video_capture_device_dummy.h', 'video/capture/video_capture_device_dummy.h',
...@@ -451,7 +471,7 @@ ...@@ -451,7 +471,7 @@
'base/simd/scale_yuv_to_rgb_sse2_x64.asm', 'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
], ],
}], }],
[ 'os_posix == 1 and OS != "mac"', { [ 'os_posix == 1 and OS != "mac" and OS != "android"', {
'cflags': [ 'cflags': [
'-msse2', '-msse2',
'-msse3', '-msse3',
...@@ -555,7 +575,6 @@ ...@@ -555,7 +575,6 @@
'../base/base.gyp:test_support_base', '../base/base.gyp:test_support_base',
'../testing/gmock.gyp:gmock', '../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest', '../testing/gtest.gyp:gtest',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../ui/ui.gyp:ui', '../ui/ui.gyp:ui',
], ],
'sources': [ 'sources': [
...@@ -618,11 +637,15 @@ ...@@ -618,11 +637,15 @@
}], }],
], ],
}], }],
['OS=="android"', { ['OS != "android"', {
'dependencies!': [ 'dependencies': [
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg', '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
], ],
}],
['OS == "android"', {
'sources!': [ 'sources!': [
'base/test_data_util.cc',
'base/test_data_util.h',
'ffmpeg/ffmpeg_common_unittest.cc', 'ffmpeg/ffmpeg_common_unittest.cc',
'filters/ffmpeg_audio_decoder_unittest.cc', 'filters/ffmpeg_audio_decoder_unittest.cc',
'filters/bitstream_converter_unittest.cc', 'filters/bitstream_converter_unittest.cc',
...@@ -800,7 +823,7 @@ ...@@ -800,7 +823,7 @@
}, },
], ],
}], }],
['os_posix == 1 and OS != "mac"', { ['os_posix == 1 and OS != "mac" and OS != "android"', {
'targets': [ 'targets': [
{ {
'target_name': 'player_x11', 'target_name': 'player_x11',
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment