Bug 815905 - Provide JNI as a fallback if OpenSLES opening fails. r=jesup

Gian-Carlo Pascutto 2013-04-16 15:47:41 +02:00
parent ffef92f8a7
commit 342c4f72c7
4 changed files with 71 additions and 74 deletions
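The core of the patch is a runtime probe: before committing to the OpenSL ES backend, the audio device module now tries to dlopen() libOpenSLES.so and falls back to the JNI-based backend when the library is missing (as on Android 2.2). A minimal standalone sketch of that probe pattern, assuming a POSIX toolchain (link with -ldl); OpenSLESAvailable() is an illustrative helper, not a function from this tree:

#include <dlfcn.h>
#include <cstdio>

// Probe for the OpenSL ES runtime library without linking against it.
static bool OpenSLESAvailable() {
  void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY);
  if (opensles_lib == NULL)
    return false;         // Library absent (e.g. Android 2.2): use fallback.
  dlclose(opensles_lib);  // Probe only; the backend reopens it when needed.
  return true;
}

int main() {
  std::printf("audio backend: %s\n",
              OpenSLESAvailable() ? "OpenSL ES" : "JNI");
  return 0;
}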

android/audio_device_jni_android.h

@@ -24,15 +24,6 @@ namespace webrtc
{
class EventWrapper;
const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
class ThreadWrapper;
class AudioDeviceAndroidJni : public AudioDeviceGeneric {
@@ -159,6 +150,14 @@ class AudioDeviceAndroidJni : public AudioDeviceGeneric {
virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
static const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
static const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
static const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
static const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
static const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
private:
// Lock
void Lock() {

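A note on the header change above: with the build change later in this commit, both backend headers get compiled together and can be included into one translation unit, so namespace-scope constants sharing a name across the two headers, such as N_REC_SAMPLES_PER_SEC, would be redefinitions. Moving them into each class as static members gives each backend its own scoped copy. A minimal sketch of the pattern, with hypothetical struct names standing in for the real classes:

#include <cstdio>

// JniSketch / OpenSLESSketch are illustrative stand-ins for
// AudioDeviceAndroidJni and AudioDeviceAndroidOpenSLES.
struct JniSketch {
  static const unsigned int N_REC_SAMPLES_PER_SEC = 16000;
};
struct OpenSLESSketch {
  static const unsigned int N_REC_SAMPLES_PER_SEC = 16000;
};

int main() {
  // Both definitions coexist because each is scoped to its class; two
  // namespace-scope constants with one name in the same TU would not compile.
  std::printf("%u %u\n",
              (unsigned)JniSketch::N_REC_SAMPLES_PER_SEC,
              (unsigned)OpenSLESSketch::N_REC_SAMPLES_PER_SEC);
  return 0;
}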
android/audio_device_opensles_android.h

@@ -28,36 +28,6 @@ namespace webrtc {
class EventWrapper;
const WebRtc_UWord32 N_MAX_INTERFACES = 3;
const WebRtc_UWord32 N_MAX_OUTPUT_DEVICES = 6;
const WebRtc_UWord32 N_MAX_INPUT_DEVICES = 3;
const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000; // Default fs
const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000; // Default fs
const WebRtc_UWord32 N_REC_CHANNELS = 1;
const WebRtc_UWord32 N_PLAY_CHANNELS = 1;
const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480;
const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = 480;
const WebRtc_UWord32 REC_MAX_TEMP_BUF_SIZE_PER_10ms =
N_REC_CHANNELS * REC_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
const WebRtc_UWord32 PLAY_MAX_TEMP_BUF_SIZE_PER_10ms =
N_PLAY_CHANNELS * PLAY_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
// Number of the buffers in playout queue
const WebRtc_UWord16 N_PLAY_QUEUE_BUFFERS = 8;
// Number of buffers in recording queue
// TODO(xian): Reduce the numbers of buffers to improve the latency.
const WebRtc_UWord16 N_REC_QUEUE_BUFFERS = 16;
// Some values returned from getMinBufferSize
// (Nexus S playout 72ms, recording 64ms)
// (Galaxy, 167ms, 44ms)
// (Nexus 7, 72ms, 48ms)
// (Xoom 92ms, 40ms)
class ThreadWrapper;
class AudioDeviceAndroidOpenSLES: public AudioDeviceGeneric {
@@ -211,6 +181,36 @@ class AudioDeviceAndroidOpenSLES: public AudioDeviceGeneric {
virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const; // NOLINT
static const WebRtc_UWord32 N_MAX_INTERFACES = 3;
static const WebRtc_UWord32 N_MAX_OUTPUT_DEVICES = 6;
static const WebRtc_UWord32 N_MAX_INPUT_DEVICES = 3;
static const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000; // Default fs
static const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000; // Default fs
static const WebRtc_UWord32 N_REC_CHANNELS = 1;
static const WebRtc_UWord32 N_PLAY_CHANNELS = 1;
static const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480;
static const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = 480;
static const WebRtc_UWord32 REC_MAX_TEMP_BUF_SIZE_PER_10ms =
N_REC_CHANNELS * REC_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
static const WebRtc_UWord32 PLAY_MAX_TEMP_BUF_SIZE_PER_10ms =
N_PLAY_CHANNELS * PLAY_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
// Number of the buffers in playout queue
static const WebRtc_UWord16 N_PLAY_QUEUE_BUFFERS = 8;
// Number of buffers in recording queue
// TODO(xian): Reduce the numbers of buffers to improve the latency.
static const WebRtc_UWord16 N_REC_QUEUE_BUFFERS = 16;
// Some values returned from getMinBufferSize
// (Nexus S playout 72ms, recording 64ms)
// (Galaxy, 167ms, 44ms)
// (Nexus 7, 72ms, 48ms)
// (Xoom 92ms, 40ms)
private:
// Lock
void Lock() {

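As a side note on the constants above: REC_BUF_SIZE_IN_SAMPLES = 480 is exactly 10 ms at the 48 kHz maximum, and REC_MAX_TEMP_BUF_SIZE_PER_10ms multiplies that by channel count and sample width (1 * 480 * 2 = 960 bytes). A quick worked check; the constant names here are illustrative, not from the tree:

#include <cstdio>
#include <stdint.h>

int main() {
  const uint32_t kMaxSampleRateHz = 48000;
  const uint32_t kSamplesPer10ms = kMaxSampleRateHz / 100;  // 480
  const uint32_t kChannels = 1;                             // mono
  const uint32_t kBytesPer10ms =
      kChannels * kSamplesPer10ms * sizeof(int16_t);        // 960
  std::printf("%u samples, %u bytes per 10 ms\n",
              (unsigned)kSamplesPer10ms, (unsigned)kBytesPer10ms);
  return 0;
}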
audio_device.gypi

@@ -110,6 +110,10 @@
'win/audio_mixer_manager_win.h',
'android/audio_device_utility_android.cc',
'android/audio_device_utility_android.h',
'android/audio_device_opensles_android.cc',
'android/audio_device_opensles_android.h',
'android/audio_device_jni_android.cc',
'android/audio_device_jni_android.h',
],
'conditions': [
['OS=="android"', {
@@ -119,19 +123,6 @@
'-lOpenSLES',
],
},
'conditions': [
['enable_android_opensl==1', {
'sources': [
'android/audio_device_opensles_android.cc',
'android/audio_device_opensles_android.h',
],
}, {
'sources': [
'android/audio_device_jni_android.cc',
'android/audio_device_jni_android.h',
],
}],
],
}],
['OS=="linux"', {
'defines': [

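The .gypi hunks above hoist both backends' source files out of the enable_android_opensl condition, so Android builds now always compile both; the removed condition had frozen the choice at build time. A rough C++ analogue of that old scheme (ENABLE_ANDROID_OPENSL is an illustrative macro, and the structs stand in for the real classes):

#include <cstdio>

struct OpenSLESBackend { static const char* Name() { return "OpenSL ES"; } };
struct JniBackend      { static const char* Name() { return "JNI"; } };

// Old scheme: a build flag selected the single backend compiled into the
// binary, so devices without libOpenSLES.so had no working fallback.
#if defined(ENABLE_ANDROID_OPENSL)
typedef OpenSLESBackend PlatformBackend;
#else
typedef JniBackend PlatformBackend;
#endif

int main() {
  std::printf("backend fixed at build time: %s\n", PlatformBackend::Name());
  return 0;
}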
audio_device_impl.cc

@@ -24,8 +24,10 @@
#endif
#elif defined(WEBRTC_ANDROID_OPENSLES)
#include <stdlib.h>
#include <dlfcn.h>
#include "audio_device_utility_android.h"
#include "audio_device_opensles_android.h"
#include "audio_device_jni_android.h"
#elif defined(WEBRTC_ANDROID)
#include <stdlib.h>
#include "audio_device_utility_android.h"
@@ -259,12 +261,18 @@ WebRtc_Word32 AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
// Create the *Android OpenSLES* implementation of the Audio Device
//
#if defined(WEBRTC_ANDROID_OPENSLES)
if (audioLayer == kPlatformDefaultAudio)
{
// Create *Android OpenELSE Audio* implementation
ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
"Android OpenSLES Audio APIs will be utilized");
// Check if the OpenSLES library is available before going further.
void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY);
if (opensles_lib) {
// That worked, close for now and proceed normally.
dlclose(opensles_lib);
if (audioLayer == kPlatformDefaultAudio)
{
// Create *Android OpenSLES Audio* implementation
ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
"Android OpenSLES Audio APIs will be utilized");
}
}
if (ptrAudioDevice != NULL)
@@ -272,25 +280,24 @@ WebRtc_Word32 AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
// Create the Android implementation of the Device Utility.
ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
}
// END #if defined(WEBRTC_ANDROID_OPENSLES)
#elif defined(WEBRTC_ANDROID_OPENSLES) or defined(WEBRTC_ANDROID)
// Fall back to this case if on Android 2.2/OpenSLES not available.
if (ptrAudioDevice == NULL) {
// Create the *Android Java* implementation of the Audio Device
if (audioLayer == kPlatformDefaultAudio)
{
// Create *Android JNI Audio* implementation
ptrAudioDevice = new AudioDeviceAndroidJni(Id());
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
}
// Create the *Android Java* implementation of the Audio Device
//
#elif defined(WEBRTC_ANDROID)
if (audioLayer == kPlatformDefaultAudio)
{
// Create *Android JNI Audio* implementation
ptrAudioDevice = new AudioDeviceAndroidJni(Id());
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
if (ptrAudioDevice != NULL)
{
// Create the Android implementation of the Device Utility.
ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
}
}
if (ptrAudioDevice != NULL)
{
// Create the Android implementation of the Device Utility.
ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
}
// END #if defined(WEBRTC_ANDROID)
// Create the *Linux* implementation of the Audio Device
//
#elif defined(WEBRTC_LINUX)
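Taken together, the post-patch flow in CreatePlatformSpecificObjects() is roughly the sketch below: probe for OpenSL ES first, and if no device was created, fall through to the JNI implementation. This is a simplified paraphrase (it drops the audioLayer check, tracing, and device-utility creation), with stand-in class bodies so it compiles on its own; CreateAndroidAudioDevice is a hypothetical wrapper:

#include <dlfcn.h>
#include <cstddef>

// Stand-in declarations; the real classes live in the headers changed above.
class AudioDeviceGeneric {
 public:
  virtual ~AudioDeviceGeneric() {}
};
class AudioDeviceAndroidOpenSLES : public AudioDeviceGeneric {
 public:
  explicit AudioDeviceAndroidOpenSLES(int /*id*/) {}
};
class AudioDeviceAndroidJni : public AudioDeviceGeneric {
 public:
  explicit AudioDeviceAndroidJni(int /*id*/) {}
};

AudioDeviceGeneric* CreateAndroidAudioDevice(int id) {
  AudioDeviceGeneric* device = NULL;
#if defined(WEBRTC_ANDROID_OPENSLES)
  // Only pick OpenSL ES when libOpenSLES.so is actually present.
  if (void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY)) {
    dlclose(opensles_lib);
    device = new AudioDeviceAndroidOpenSLES(id);
  }
#endif
#if defined(WEBRTC_ANDROID_OPENSLES) || defined(WEBRTC_ANDROID)
  // Fallback, e.g. Android 2.2 where OpenSL ES is absent: the JNI backend.
  if (device == NULL)
    device = new AudioDeviceAndroidJni(id);
#endif
  return device;  // NULL off-Android; the caller must check.
}

int main() {
  AudioDeviceGeneric* device = CreateAndroidAudioDevice(0);
  delete device;
  return 0;
}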