Mirror of https://gitlab.winehq.org/wine-gecko.git (synced 2024-09-13 09:24:08 -07:00)

commit 52771d4abf (parent 836b549082)

Bug 932112: Rollup of changes previously applied to media/webrtc/trunk/webrtc rs=jesup

Add AndroidAudioManager to the moz.build files.
@@ -254,7 +254,7 @@ MediaEngineWebRTC::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSourc
   JNIEnv *env;
   jvm->AttachCurrentThread(&env, nullptr);

-  if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
+  if (webrtc::VoiceEngine::SetAndroidObjects(jvm, env, (void*)context) != 0) {
    LOG(("VoiceEngine:SetAndroidObjects Failed"));
    return;
  }
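Note: VoiceEngine::SetAndroidObjects() now takes the JNIEnv alongside the JavaVM, so callers must hold an attached env before the call. A minimal caller-side sketch of the new convention (the wrapper function and its name are illustrative, not from this commit; only the SetAndroidObjects call is):

    // Sketch: attach the current thread to the VM to obtain a JNIEnv*, then
    // hand the VM, env and Android context to the voice engine together.
    static bool InitVoiceEngineAndroid(JavaVM* jvm, jobject context) {
      JNIEnv* env = nullptr;
      jvm->AttachCurrentThread(&env, nullptr);
      return webrtc::VoiceEngine::SetAndroidObjects(jvm, env, (void*)context) == 0;
    }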
@@ -45,7 +45,6 @@
 #include "webrtc/video_engine/include/vie_codec.h"
 #include "webrtc/video_engine/include/vie_render.h"
 #include "webrtc/video_engine/include/vie_capture.h"
-#include "webrtc/video_engine/include/vie_file.h"
 #ifdef MOZ_B2G_CAMERA
 #include "CameraPreviewMediaStream.h"
 #include "DOMCameraManager.h"
@@ -118,7 +117,14 @@ public:
 #else
   // ViEExternalRenderer.
   virtual int FrameSizeChange(unsigned int, unsigned int, unsigned int);
-  virtual int DeliverFrame(unsigned char*, int, uint32_t, int64_t);
+  virtual int DeliverFrame(unsigned char*, int, uint32_t, int64_t,
+                           void *handle);
+  /**
+   * Does DeliverFrame() support a null buffer and non-null handle
+   * (video texture)?
+   * XXX Investigate! Especially for Android/B2G
+   */
+  virtual bool IsTextureSupported() { return false; }

   MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex)
     : mVideoEngine(aVideoEnginePtr)
@@ -40,7 +40,8 @@ MediaEngineWebRTCVideoSource::FrameSizeChange(
 // ViEExternalRenderer Callback. Process every incoming frame here.
 int
 MediaEngineWebRTCVideoSource::DeliverFrame(
-   unsigned char* buffer, int size, uint32_t time_stamp, int64_t render_time)
+   unsigned char* buffer, int size, uint32_t time_stamp, int64_t render_time,
+   void *handle)
 {
   // mInSnapshotMode can only be set before the camera is turned on and
   // the renderer is started, so this amounts to a 1-shot
@@ -399,126 +400,7 @@ MediaEngineWebRTCVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
 nsresult
 MediaEngineWebRTCVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
 {
-  /**
-   * To get a Snapshot we do the following:
-   * - Set a condition variable (mInSnapshotMode) to true
-   * - Attach the external renderer and start the camera
-   * - Wait for the condition variable to change to false
-   *
-   * Starting the camera has the effect of invoking DeliverFrame() when
-   * the first frame arrives from the camera. We only need one frame for
-   * GetCaptureDeviceSnapshot to work, so we immediately set the condition
-   * variable to false and notify this method.
-   *
-   * This causes the current thread to continue (PR_CondWaitVar will return),
-   * at which point we can grab a snapshot, convert it to a file and
-   * return from this function after cleaning up the temporary stream object
-   * and caling Stop() on the media source.
-   */
-#ifdef MOZ_B2G_CAMERA
-  ReentrantMonitorAutoEnter sync(mCallbackMonitor);
-#endif
-  *aFile = nullptr;
-  if (!mInitDone || mState != kAllocated) {
-    return NS_ERROR_FAILURE;
-  }
-#ifdef MOZ_B2G_CAMERA
-  mLastCapture = nullptr;
-
-  NS_DispatchToMainThread(WrapRunnable(this,
-                                       &MediaEngineWebRTCVideoSource::StartImpl,
-                                       mCapability));
-  mCallbackMonitor.Wait();
-  if (mState != kStarted) {
-    return NS_ERROR_FAILURE;
-  }
-
-  NS_DispatchToMainThread(WrapRunnable(this,
-                                       &MediaEngineWebRTCVideoSource::SnapshotImpl));
-  mCallbackMonitor.Wait();
-  if (mLastCapture == nullptr)
-    return NS_ERROR_FAILURE;
-
-  mState = kStopped;
-  NS_DispatchToMainThread(WrapRunnable(this,
-                                       &MediaEngineWebRTCVideoSource::StopImpl));
-
-  // The camera return nsDOMMemoryFile indeed, and the inheritance tree is:
-  // nsIDOMBlob <- nsIDOMFile <- nsDOMFileBase <- nsDOMFile <- nsDOMMemoryFile
-  *aFile = mLastCapture.get();
-  return NS_OK;
-#else
-  {
-    MonitorAutoLock lock(mMonitor);
-    mInSnapshotMode = true;
-  }
-
-  // Start the rendering (equivalent to calling Start(), but without a track).
-  int error = 0;
-  if (!mInitDone || mState != kAllocated) {
-    return NS_ERROR_FAILURE;
-  }
-  error = mViERender->AddRenderer(mCaptureIndex, webrtc::kVideoI420, (webrtc::ExternalRenderer*)this);
-  if (error == -1) {
-    return NS_ERROR_FAILURE;
-  }
-  error = mViERender->StartRender(mCaptureIndex);
-  if (error == -1) {
-    return NS_ERROR_FAILURE;
-  }
-
-  if (mViECapture->StartCapture(mCaptureIndex, mCapability) < 0) {
-    return NS_ERROR_FAILURE;
-  }
-
-  // Wait for the condition variable, will be set in DeliverFrame.
-  // We use a while loop, because even if Wait() returns, it's not
-  // guaranteed that the condition variable changed.
-  // FIX: we need need a way to cancel this and to bail if it appears to not be working
-  // Perhaps a maximum time, though some cameras can take seconds to start. 10 seconds?
-  {
-    MonitorAutoLock lock(mMonitor);
-    while (mInSnapshotMode) {
-      lock.Wait();
-    }
-  }
-
-  // If we get here, DeliverFrame received at least one frame.
-  webrtc::ViEFile* vieFile = webrtc::ViEFile::GetInterface(mVideoEngine);
-  if (!vieFile) {
-    return NS_ERROR_FAILURE;
-  }
-
-  // Create a temporary file on the main thread and put the snapshot in it.
-  // See Run() in MediaEngineWebRTCVideo.h (sets mSnapshotPath).
-  NS_DispatchToMainThread(this, NS_DISPATCH_SYNC);
-
-  if (!mSnapshotPath) {
-    return NS_ERROR_FAILURE;
-  }
-
-  NS_ConvertUTF16toUTF8 path(*mSnapshotPath);
-  if (vieFile->GetCaptureDeviceSnapshot(mCaptureIndex, path.get()) < 0) {
-    delete mSnapshotPath;
-    mSnapshotPath = nullptr;
-    return NS_ERROR_FAILURE;
-  }
-
-  // Stop the camera.
-  mViERender->StopRender(mCaptureIndex);
-  mViERender->RemoveRenderer(mCaptureIndex);
-
-  nsCOMPtr<nsIFile> file;
-  nsresult rv = NS_NewLocalFile(*mSnapshotPath, false, getter_AddRefs(file));
-
-  delete mSnapshotPath;
-  mSnapshotPath = nullptr;
-
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  NS_ADDREF(*aFile = new nsDOMFileFile(file));
-#endif
-  return NS_OK;
+  return NS_ERROR_NOT_IMPLEMENTED;
 }

 /**
@@ -9,10 +9,12 @@ SHARED_LIBRARY_LIBS = \
  $(call EXPAND_LIBNAME_PATH,video_capture_module,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_video_capture_module) \
  $(call EXPAND_LIBNAME_PATH,webrtc_utility,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_webrtc_utility) \
  $(call EXPAND_LIBNAME_PATH,audio_coding_module,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_audio_coding_module) \
+ $(call EXPAND_LIBNAME_PATH,acm2,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_acm2) \
  $(call EXPAND_LIBNAME_PATH,CNG,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_CNG) \
  $(call EXPAND_LIBNAME_PATH,G711,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_G711) \
  $(call EXPAND_LIBNAME_PATH,PCM16B,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_PCM16B) \
  $(call EXPAND_LIBNAME_PATH,NetEq,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_NetEq) \
+ $(call EXPAND_LIBNAME_PATH,NetEq4,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_NetEq4) \
  $(call EXPAND_LIBNAME_PATH,system_wrappers,$(DEPTH)/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_system_wrappers) \
  $(call EXPAND_LIBNAME_PATH,webrtc_video_coding,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_webrtc_video_coding) \
  $(call EXPAND_LIBNAME_PATH,video_coding_utility,$(DEPTH)/media/webrtc/trunk/webrtc/modules/video_coding/utility/video_coding_utility_video_coding_utility) \
@@ -26,6 +28,7 @@ SHARED_LIBRARY_LIBS = \
  $(call EXPAND_LIBNAME_PATH,rtp_rtcp,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_rtp_rtcp) \
  $(call EXPAND_LIBNAME_PATH,bitrate_controller,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_bitrate_controller) \
  $(call EXPAND_LIBNAME_PATH,remote_bitrate_estimator,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_remote_bitrate_estimator) \
+ $(call EXPAND_LIBNAME_PATH,rbe_components,$(DEPTH)/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components_rbe_components) \
  $(call EXPAND_LIBNAME_PATH,paced_sender,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_paced_sender) \
  $(call EXPAND_LIBNAME_PATH,video_processing,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_video_processing) \
  $(call EXPAND_LIBNAME_PATH,audio_conference_mixer,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_audio_conference_mixer) \
@@ -153,8 +153,9 @@ MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other)

     // get the JVM
     JavaVM *jvm = jsjni_GetVM();
+    JNIEnv* jenv = jsjni_GetJNIForThread();

-    if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
+    if (webrtc::VoiceEngine::SetAndroidObjects(jvm, jenv, (void*)context) != 0) {
       CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__);
       return kMediaConduitSessionNotInited;
     }
@@ -972,7 +972,8 @@ int
 WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
                                  int buffer_size,
                                  uint32_t time_stamp,
-                                 int64_t render_time)
+                                 int64_t render_time,
+                                 void *handle)
 {
   CSFLogDebug(logTag, "%s Buffer Size %d", __FUNCTION__, buffer_size);

@@ -16,7 +16,6 @@
 #include "webrtc/video_engine/include/vie_codec.h"
 #include "webrtc/video_engine/include/vie_render.h"
 #include "webrtc/video_engine/include/vie_network.h"
-#include "webrtc/video_engine/include/vie_file.h"
 #include "webrtc/video_engine/include/vie_rtp_rtcp.h"

 /** This file hosts several structures identifying different aspects
@@ -148,7 +147,15 @@ public:
   */
  virtual int FrameSizeChange(unsigned int, unsigned int, unsigned int);

- virtual int DeliverFrame(unsigned char*, int, uint32_t, int64_t);
+ virtual int DeliverFrame(unsigned char*, int, uint32_t, int64_t,
+                          void *handle);
+
+ /**
+  * Does DeliverFrame() support a null buffer and non-null handle
+  * (video texture)?
+  * XXX Investigate! Especially for Android/B2G
+  */
+ virtual bool IsTextureSupported() { return false; }

  unsigned short SendingWidth() {
    return mSendingWidth;
@@ -588,7 +588,7 @@ static short vcmRxAllocICE_s(TemporaryRef<NrIceCtx> ctx_in,
   VcmSIPCCBinding::connectCandidateSignal(stream);

   std::vector<std::string> candidates = stream->GetCandidates();
-  CSFLogDebug( logTag, "%s: Got %lu candidates", __FUNCTION__, candidates.size());
+  CSFLogDebug( logTag, "%s: Got %lu candidates", __FUNCTION__, (unsigned long) candidates.size());

   std::string default_addr;
   int default_port;
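Note: the cast fixes a format-string mismatch rather than changing behavior. std::vector::size() returns size_t, whose underlying type varies by platform, while "%lu" always expects unsigned long. A minimal sketch of the portable pattern (the helper name is illustrative):

    #include <cstdio>
    #include <string>
    #include <vector>

    void LogCandidateCount(const std::vector<std::string>& candidates) {
      // size_t may be unsigned int or unsigned long depending on the target;
      // the cast makes the argument always match the "%lu" conversion.
      std::printf("Got %lu candidates\n", (unsigned long) candidates.size());
    }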
@@ -23,8 +23,30 @@
         'cflags!': [
           '-mfpu=vfpv3-d16',
         ],
+        'cflags_mozilla!': [
+          '-mfpu=vfpv3-d16',
+        ],
         'cflags': [
           '-mfpu=neon',
           '-flax-vector-conversions',
         ],
+        'cflags_mozilla': [
+          '-mfpu=neon',
+          '-flax-vector-conversions',
+        ],
+        'asflags!': [
+          '-mfpu=vfpv3-d16',
+        ],
+        'asflags_mozilla!': [
+          '-mfpu=vfpv3-d16',
+        ],
+        'asflags': [
+          '-mfpu=neon',
+          '-flax-vector-conversions',
+        ],
+        'asflags_mozilla': [
+          '-mfpu=neon',
+          '-flax-vector-conversions',
+        ],
+
       }
@@ -45,7 +45,12 @@

     'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
     'rbe_components_path%': '<(webrtc_root)/modules/remote_bitrate_estimator',
+    'include_g711%': 1,
+    'include_g722%': 1,
+    'include_ilbc%': 1,
     'include_opus%': 1,
+    'include_isac%': 1,
+    'include_pcm16b%': 1,
   },
   'build_with_chromium%': '<(build_with_chromium)',
   'build_with_libjingle%': '<(build_with_libjingle)',
@@ -54,7 +59,14 @@
   'import_isolate_path%': '<(import_isolate_path)',
   'modules_java_gyp_path%': '<(modules_java_gyp_path)',
   'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
+
+  'include_g711%': '<(include_g711)',
+  'include_g722%': '<(include_g722)',
+  'include_ilbc%': '<(include_ilbc)',
   'include_opus%': '<(include_opus)',
+  'include_isac%': '<(include_isac)',
+  'include_pcm16b%': '<(include_pcm16b)',
+
   'rbe_components_path%': '<(rbe_components_path)',

   # The Chromium common.gypi we use treats all gyp files without
@@ -113,6 +125,9 @@

       # Include ndk cpu features in Chromium build.
       'include_ndk_cpu_features%': 1,
+
+      # lazily allocate the ~4MB of trace message buffers if set
+      'enable_lazy_trace_alloc%': 0,
     }, { # Settings for the standalone (not-in-Chromium) build.
       # TODO(andrew): For now, disable the Chrome plugins, which causes a
       # flood of chromium-style warnings. Investigate enabling them:
@@ -136,6 +151,21 @@
           # and Java Implementation
           'enable_android_opensl%': 0,
         }],
+        ['OS=="linux"', {
+          'include_alsa_audio%': 1,
+        }, {
+          'include_alsa_audio%': 0,
+        }],
+        ['OS=="solaris" or os_bsd==1', {
+          'include_pulse_audio%': 1,
+        }, {
+          'include_pulse_audio%': 0,
+        }],
+        ['OS=="linux" or OS=="solaris" or os_bsd==1', {
+          'include_v4l2_video_capture%': 1,
+        }, {
+          'include_v4l2_video_capture%': 0,
+        }],
         ['OS=="ios"', {
           'build_libjpeg%': 0,
           'enable_protobuf%': 0,
@@ -160,10 +190,15 @@
     'defines': [
       # TODO(leozwang): Run this as a gclient hook rather than at build-time:
       # http://code.google.com/p/webrtc/issues/detail?id=687
-      'WEBRTC_SVNREVISION="Unavailable(issue687)"',
+      'WEBRTC_SVNREVISION="\\\"Unavailable_issue687\\\""',
      #'WEBRTC_SVNREVISION="<!(python <(webrtc_root)/build/version.py)"',
     ],
     'conditions': [
+      ['moz_widget_toolkit_gonk==1', {
+        'defines' : [
+          'WEBRTC_GONK',
+        ],
+      }],
       ['enable_tracing==1', {
         'defines': ['WEBRTC_LOGGING',],
       }],
@@ -201,7 +236,8 @@
     ],
     'conditions': [
       ['armv7==1', {
-        'defines': ['WEBRTC_ARCH_ARM_V7',],
+        'defines': ['WEBRTC_ARCH_ARM_V7',
+                    'WEBRTC_BUILD_NEON_LIBS'],
         'conditions': [
           ['arm_neon==1', {
             'defines': ['WEBRTC_ARCH_ARM_NEON',],
@@ -212,6 +248,19 @@
           }],
         ],
       }],
+      ['os_bsd==1', {
+        'defines': [
+          'WEBRTC_BSD',
+          'WEBRTC_THREAD_RR',
+        ],
+      }],
+      ['OS=="dragonfly" or OS=="netbsd"', {
+        'defines': [
+          # doesn't support pthread_condattr_setclock
+          'WEBRTC_CLOCK_TYPE_REALTIME',
+        ],
+      }],
+      # Mozilla: if we support Mozilla on MIPS, we'll need to mod the cflags entries here
       ['target_arch=="mipsel"', {
         'defines': [
           'MIPS32_LE',
@@ -272,6 +321,13 @@
         ],
       }],
       ['OS=="linux"', {
+#       'conditions': [
+#         ['have_clock_monotonic==1', {
+#           'defines': [
+#             'WEBRTC_CLOCK_TYPE_REALTIME',
+#           ],
+#         }],
+#       ],
         'defines': [
           'WEBRTC_LINUX',
         ],
@@ -295,17 +351,18 @@
         # Re-enable some warnings that Chromium disables.
         'msvs_disabled_warnings!': [4189,],
       }],
+      # used on GONK as well
+      ['enable_android_opensl==1 and (OS=="android" or moz_widget_toolkit_gonk==1)', {
+        'defines': [
+          'WEBRTC_ANDROID_OPENSLES',
+        ],
+      }],
       ['OS=="android"', {
         'defines': [
           'WEBRTC_LINUX',
           'WEBRTC_ANDROID',
         ],
         'conditions': [
-          ['enable_android_opensl==1', {
-            'defines': [
-              'WEBRTC_ANDROID_OPENSLES',
-            ],
-          }],
           ['clang!=1', {
             # The Android NDK doesn't provide optimized versions of these
             # functions. Ensure they are disabled for all compilers.
@@ -44,5 +44,7 @@
       },
     ],
   },
+# }],
+# ],
   ],
 }
@@ -155,6 +155,7 @@
             'resampler/sinc_resampler_sse.cc',
           ],
           'cflags': ['-msse2',],
+          'cflags_mozilla': ['-msse2',],
           'xcode_settings': {
             'OTHER_CFLAGS': ['-msse2',],
           },
@@ -17,98 +17,47 @@
 #define WEBRTC_RESAMPLER_RESAMPLER_H_

 #include "webrtc/typedefs.h"
+#include "speex/speex_resampler.h"

 namespace webrtc
 {

-// TODO(andrew): the implementation depends on the exact values of this enum.
-// It should be rewritten in a less fragile way.
+#define FIXED_RATE_RESAMPLER 0x10
+
 enum ResamplerType
 {
-    // 4 MSB = Number of channels
-    // 4 LSB = Synchronous or asynchronous
-
-    kResamplerSynchronous = 0x10,
-    kResamplerAsynchronous = 0x11,
-    kResamplerSynchronousStereo = 0x20,
-    kResamplerAsynchronousStereo = 0x21,
-    kResamplerInvalid = 0xff
-};
-
-// TODO(andrew): doesn't need to be part of the interface.
-enum ResamplerMode
-{
-    kResamplerMode1To1,
-    kResamplerMode1To2,
-    kResamplerMode1To3,
-    kResamplerMode1To4,
-    kResamplerMode1To6,
-    kResamplerMode1To12,
-    kResamplerMode2To3,
-    kResamplerMode2To11,
-    kResamplerMode4To11,
-    kResamplerMode8To11,
-    kResamplerMode11To16,
-    kResamplerMode11To32,
-    kResamplerMode2To1,
-    kResamplerMode3To1,
-    kResamplerMode4To1,
-    kResamplerMode6To1,
-    kResamplerMode12To1,
-    kResamplerMode3To2,
-    kResamplerMode11To2,
-    kResamplerMode11To4,
-    kResamplerMode11To8
+    kResamplerSynchronous = 0x00,
+    kResamplerSynchronousStereo = 0x01,
+    kResamplerFixedSynchronous = 0x00 | FIXED_RATE_RESAMPLER,
+    kResamplerFixedSynchronousStereo = 0x01 | FIXED_RATE_RESAMPLER,
 };

 class Resampler
 {
 public:
     Resampler();
     // TODO(andrew): use an init function instead.
-    Resampler(int inFreq, int outFreq, ResamplerType type);
+    Resampler(int in_freq, int out_freq, ResamplerType type);
     ~Resampler();

     // Reset all states
-    int Reset(int inFreq, int outFreq, ResamplerType type);
+    int Reset(int in_freq, int out_freq, ResamplerType type);

     // Reset all states if any parameter has changed
-    int ResetIfNeeded(int inFreq, int outFreq, ResamplerType type);
+    int ResetIfNeeded(int in_freq, int out_freq, ResamplerType type);

     // Synchronous resampling, all output samples are written to samplesOut
-    int Push(const int16_t* samplesIn, int lengthIn, int16_t* samplesOut,
-             int maxLen, int &outLen);
-
-    // Asynchronous resampling, input
-    int Insert(int16_t* samplesIn, int lengthIn);
-
-    // Asynchronous resampling output, remaining samples are buffered
-    int Pull(int16_t* samplesOut, int desiredLen, int &outLen);
+    int Push(const int16_t* samples_in, int length_in,
+             int16_t* samples_out, int max_len, int &out_len);

 private:
-    // Generic pointers since we don't know what states we'll need
-    void* state1_;
-    void* state2_;
-    void* state3_;
-
-    // Storage if needed
-    int16_t* in_buffer_;
-    int16_t* out_buffer_;
-    int in_buffer_size_;
-    int out_buffer_size_;
-    int in_buffer_size_max_;
-    int out_buffer_size_max_;
-
-    // State
-    int my_in_frequency_khz_;
-    int my_out_frequency_khz_;
-    ResamplerMode my_mode_;
-    ResamplerType my_type_;
-
-    // Extra instance for stereo
-    Resampler* slave_left_;
-    Resampler* slave_right_;
+    bool IsFixedRate() { return !!(type_ & FIXED_RATE_RESAMPLER); }
+
+    SpeexResamplerState* state_;
+
+    int in_freq_;
+    int out_freq_;
+    int channels_;
+    ResamplerType type_;
 };

 } // namespace webrtc
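Note: with this rewrite the resampler is backed by a single Speex state; the asynchronous Insert()/Pull() interface and the per-ratio mode table are gone, and stereo is selected through the type flags rather than a pair of slave instances. A usage sketch against the new header (rates, buffer sizes, and the helper name are illustrative):

    #include "webrtc/common_audio/resampler/include/resampler.h"

    // Sketch: push one 10 ms mono frame through the new synchronous API.
    void Downsample48kTo16k(const int16_t* in /* 480 samples */,
                            int16_t* out /* room for 160 samples */) {
      webrtc::Resampler rs(48000, 16000, webrtc::kResamplerSynchronous);
      int out_len = 0;
      rs.Push(in, 480, out, 160, out_len);  // on success out_len == 160
    }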
@@ -13,7 +13,6 @@
 #include <string.h>

 #include "webrtc/common_audio/include/audio_util.h"
-#include "webrtc/common_audio/resampler/include/resampler.h"
 #include "webrtc/common_audio/resampler/push_sinc_resampler.h"

 namespace webrtc {
[File diff suppressed because it is too large]
@@ -8,6 +8,8 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

+#include <math.h>
+
 #include "testing/gtest/include/gtest/gtest.h"

 #include "webrtc/common_audio/resampler/include/resampler.h"
@@ -18,10 +20,7 @@ namespace webrtc {
 namespace {
 const ResamplerType kTypes[] = {
   kResamplerSynchronous,
-  kResamplerAsynchronous,
   kResamplerSynchronousStereo,
-  kResamplerAsynchronousStereo
-  // kResamplerInvalid excluded
 };
 const size_t kTypesSize = sizeof(kTypes) / sizeof(*kTypes);

@@ -31,7 +30,7 @@ const int kRates[] = {
   8000,
   16000,
   32000,
-  44000,
+  44100,
   48000,
   kMaxRate
 };
@@ -39,26 +38,19 @@ const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates);
 const int kMaxChannels = 2;
 const size_t kDataSize = static_cast<size_t> (kMaxChannels * kMaxRate / 100);

-// TODO(andrew): should we be supporting these combinations?
-bool ValidRates(int in_rate, int out_rate) {
-  // Not the most compact notation, for clarity.
-  if ((in_rate == 44000 && (out_rate == 48000 || out_rate == 96000)) ||
-      (out_rate == 44000 && (in_rate == 48000 || in_rate == 96000))) {
-    return false;
-  }
-
-  return true;
-}
-
 class ResamplerTest : public testing::Test {
  protected:
   ResamplerTest();
   virtual void SetUp();
   virtual void TearDown();
+  void RunResampleTest(int channels,
+                       int src_sample_rate_hz,
+                       int dst_sample_rate_hz);

   Resampler rs_;
   int16_t data_in_[kDataSize];
   int16_t data_out_[kDataSize];
+  int16_t data_reference_[kDataSize];
 };

 ResamplerTest::ResamplerTest() {}
@@ -83,34 +75,119 @@ TEST_F(ResamplerTest, Reset) {
       ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]
          << ", type: " << kTypes[k];
       SCOPED_TRACE(ss.str());
-      if (ValidRates(kRates[i], kRates[j]))
-        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k]));
-      else
-        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kTypes[k]));
+      EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k]));
     }
   }
 }

-// TODO(tlegrand): Replace code inside the two tests below with a function
-// with number of channels and ResamplerType as input.
-TEST_F(ResamplerTest, Synchronous) {
-  for (size_t i = 0; i < kRatesSize; ++i) {
-    for (size_t j = 0; j < kRatesSize; ++j) {
-      std::ostringstream ss;
-      ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
-      SCOPED_TRACE(ss.str());
-
-      if (ValidRates(kRates[i], kRates[j])) {
-        int in_length = kRates[i] / 100;
-        int out_length = 0;
-        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous));
-        EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
-                              out_length));
-        EXPECT_EQ(kRates[j] / 100, out_length);
-      } else {
-        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous));
-      }
-    }
-  }
-}
+// Sets the signal value to increase by |data| with every sample. Floats are
+// used so non-integer values result in rounding error, but not an accumulating
+// error.
+void SetMonoFrame(int16_t* buffer, float data, int sample_rate_hz) {
+  for (int i = 0; i < sample_rate_hz / 100; i++) {
+    buffer[i] = data * i;
+  }
+}
+
+// Sets the signal value to increase by |left| and |right| with every sample in
+// each channel respectively.
+void SetStereoFrame(int16_t* buffer, float left, float right,
+                    int sample_rate_hz) {
+  for (int i = 0; i < sample_rate_hz / 100; i++) {
+    buffer[i * 2] = left * i;
+    buffer[i * 2 + 1] = right * i;
+  }
+}
+
+// Computes the best SNR based on the error between |ref_frame| and
+// |test_frame|. It allows for a sample delay between the signals to
+// compensate for the resampling delay.
+float ComputeSNR(const int16_t* reference, const int16_t* test,
+                 int sample_rate_hz, int channels, int max_delay) {
+  float best_snr = 0;
+  int best_delay = 0;
+  int samples_per_channel = sample_rate_hz/100;
+  for (int delay = 0; delay < max_delay; delay++) {
+    float mse = 0;
+    float variance = 0;
+    for (int i = 0; i < samples_per_channel * channels - delay; i++) {
+      int error = reference[i] - test[i + delay];
+      mse += error * error;
+      variance += reference[i] * reference[i];
+    }
+    float snr = 100;  // We assign 100 dB to the zero-error case.
+    if (mse > 0)
+      snr = 10 * log10(variance / mse);
+    if (snr > best_snr) {
+      best_snr = snr;
+      best_delay = delay;
+    }
+  }
+  printf("SNR=%.1f dB at delay=%d\n", best_snr, best_delay);
+  return best_snr;
+}
+
+void ResamplerTest::RunResampleTest(int channels,
+                                    int src_sample_rate_hz,
+                                    int dst_sample_rate_hz) {
+  Resampler resampler;  // Create a new one with every test.
+  const int16_t kSrcLeft = 60;  // Shouldn't overflow for any used sample rate.
+  const int16_t kSrcRight = 30;
+  const float kResamplingFactor = (1.0 * src_sample_rate_hz) /
+      dst_sample_rate_hz;
+  const float kDstLeft = kResamplingFactor * kSrcLeft;
+  const float kDstRight = kResamplingFactor * kSrcRight;
+  if (channels == 1)
+    SetMonoFrame(data_in_, kSrcLeft, src_sample_rate_hz);
+  else
+    SetStereoFrame(data_in_, kSrcLeft, kSrcRight, src_sample_rate_hz);
+
+  if (channels == 1) {
+    SetMonoFrame(data_out_, 0, dst_sample_rate_hz);
+    SetMonoFrame(data_reference_, kDstLeft, dst_sample_rate_hz);
+  } else {
+    SetStereoFrame(data_out_, 0, 0, dst_sample_rate_hz);
+    SetStereoFrame(data_reference_, kDstLeft, kDstRight, dst_sample_rate_hz);
+  }
+
+  // The speex resampler has a known delay dependent on quality and rates,
+  // which we approximate here. Multiplying by two gives us a crude maximum
+  // for any resampling, as the old resampler typically (but not always)
+  // has lower delay. The actual delay is calculated internally based on the
+  // filter length in the QualityMap.
+  static const int kInputKernelDelaySamples = 16*3;
+  const int max_delay = std::min(1.0f, 1/kResamplingFactor) *
+                        kInputKernelDelaySamples * channels * 2;
+  printf("(%d, %d Hz) -> (%d, %d Hz) ",  // SNR reported on the same line later.
+         channels, src_sample_rate_hz, channels, dst_sample_rate_hz);
+
+  int in_length = channels * src_sample_rate_hz / 100;
+  int out_length = 0;
+  EXPECT_EQ(0, rs_.Reset(src_sample_rate_hz, dst_sample_rate_hz,
+                         (channels == 1 ?
+                          kResamplerSynchronous :
+                          kResamplerSynchronousStereo)));
+  EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
+                        out_length));
+  EXPECT_EQ(channels * dst_sample_rate_hz / 100, out_length);
+  // EXPECT_EQ(0, Resample(src_frame_, &resampler, &dst_frame_));
+  EXPECT_GT(ComputeSNR(data_reference_, data_out_, dst_sample_rate_hz,
+                       channels, max_delay), 40.0f);
+}
+
+TEST_F(ResamplerTest, Synchronous) {
+  // Number of channels is 1, mono mode.
+  const int kChannels = 1;
+  // We don't attempt to be exhaustive here, but just get good coverage. Some
+  // combinations of rates will not be resampled, and some give an odd
+  // resampling factor which makes it more difficult to evaluate.
+  const int kSampleRates[] = {16000, 32000, 44100, 48000};
+  const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
+  for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
+    for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
+      RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]);
+    }
+  }
+}
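Note: ComputeSNR() above implements the usual power ratio between the reference signal and the resampling error, maximized over a candidate delay $d$ to absorb the resampler's group delay:

$$\mathrm{SNR}(d) = 10 \log_{10} \frac{\sum_i r_i^2}{\sum_i \left(r_i - t_{i+d}\right)^2}, \qquad \mathrm{SNR} = \max_{0 \le d < d_{\max}} \mathrm{SNR}(d)$$

where $r$ is the reference and $t$ the resampled test signal; the zero-error case is pinned to 100 dB, and the test requires the best SNR to exceed 40 dB.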
@@ -118,24 +195,14 @@ TEST_F(ResamplerTest, Synchronous) {
 TEST_F(ResamplerTest, SynchronousStereo) {
   // Number of channels is 2, stereo mode.
   const int kChannels = 2;
-  for (size_t i = 0; i < kRatesSize; ++i) {
-    for (size_t j = 0; j < kRatesSize; ++j) {
-      std::ostringstream ss;
-      ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
-      SCOPED_TRACE(ss.str());
-
-      if (ValidRates(kRates[i], kRates[j])) {
-        int in_length = kChannels * kRates[i] / 100;
-        int out_length = 0;
-        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j],
-                               kResamplerSynchronousStereo));
-        EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
-                              out_length));
-        EXPECT_EQ(kChannels * kRates[j] / 100, out_length);
-      } else {
-        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j],
-                                kResamplerSynchronousStereo));
-      }
+  // We don't attempt to be exhaustive here, but just get good coverage. Some
+  // combinations of rates will not be resampled, and some give an odd
+  // resampling factor which makes it more difficult to evaluate.
+  const int kSampleRates[] = {16000, 32000, 44100, 48000};
+  const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
+  for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
+    for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
+      RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]);
     }
   }
 }
|
@ -26,11 +26,11 @@ float SincResampler::Convolve_NEON(const float* input_ptr, const float* k1,
|
|||||||
|
|
||||||
const float* upper = input_ptr + kKernelSize;
|
const float* upper = input_ptr + kKernelSize;
|
||||||
for (; input_ptr < upper; ) {
|
for (; input_ptr < upper; ) {
|
||||||
m_input = vld1q_f32(input_ptr);
|
m_input = vld1q_f32((const float32_t *) input_ptr);
|
||||||
input_ptr += 4;
|
input_ptr += 4;
|
||||||
m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32(k1));
|
m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32((const float32_t *) k1));
|
||||||
k1 += 4;
|
k1 += 4;
|
||||||
m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32(k2));
|
m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32((const float32_t *) k2));
|
||||||
k2 += 4;
|
k2 += 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
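Note: these casts are a build fix, not a behavior change. The ARM intrinsic is declared as float32x4_t vld1q_f32(const float32_t*), and on toolchains where float32_t is a distinct type from float the implicit pointer conversion is rejected. A one-line illustration (assuming <arm_neon.h> is included, as in the surrounding file):

    // float* and float32_t* are distinct pointer types on some compilers, so
    // loading a float32x4_t from a float buffer needs an explicit cast.
    float32x4_t v = vld1q_f32((const float32_t *) input_ptr);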
@@ -341,7 +341,7 @@ enum NsModes // type of Noise Suppression
     kNsLowSuppression, // lowest suppression
     kNsModerateSuppression,
     kNsHighSuppression,
-    kNsVeryHighSuppression, // highest suppression
+    kNsVeryHighSuppression // highest suppression
 };

 enum AgcModes // type of Automatic Gain Control
@@ -366,7 +366,7 @@ enum EcModes // type of Echo Control
     kEcDefault, // platform default
     kEcConference, // conferencing default (aggressive AEC)
     kEcAec, // Acoustic Echo Cancellation
-    kEcAecm, // AEC mobile
+    kEcAecm // AEC mobile
 };

 // AECM modes
@@ -418,7 +418,7 @@ enum NetEqModes // NetEQ playout configurations
     kNetEqFax = 2,
     // Minimal buffer management. Inserts zeros for lost packets and during
     // buffer increases.
-    kNetEqOff = 3,
+    kNetEqOff = 3
 };

 enum OnHoldModes // On Hold direction
@@ -432,7 +432,7 @@ enum AmrMode
 {
     kRfc3267BwEfficient = 0,
     kRfc3267OctetAligned = 1,
-    kRfc3267FileStorage = 2,
+    kRfc3267FileStorage = 2
 };

 // ==================================================================
@@ -35,7 +35,9 @@
 #define WEBRTC_CODEC_AVT

 // PCM16 is useful for testing and incurs only a small binary size cost.
+#ifndef WEBRTC_CODEC_PCM16
 #define WEBRTC_CODEC_PCM16
+#endif

 // iLBC, G.722, and Redundancy coding are excluded from Chromium and Mozilla
 // builds to reduce binary size.
@@ -15,10 +15,6 @@

 #include "typedefs.h"

-#ifdef WEBRTC_BIG_ENDIAN
-#include "signal_processing_library.h"
-#endif
-
 #define HIGHEND 0xFF00
 #define LOWEND 0xFF

@@ -30,7 +26,7 @@ int16_t WebRtcPcm16b_EncodeW16(int16_t *speechIn16b,
                                int16_t *speechOut16b)
 {
 #ifdef WEBRTC_BIG_ENDIAN
-    WEBRTC_SPL_MEMCPY_W16(speechOut16b, speechIn16b, len);
+    memcpy(speechOut16b, speechIn16b, len * sizeof(int16_t));
 #else
     int i;
     for (i=0;i<len;i++) {
@@ -69,7 +65,7 @@ int16_t WebRtcPcm16b_DecodeW16(void *inst,
                                int16_t* speechType)
 {
 #ifdef WEBRTC_BIG_ENDIAN
-    WEBRTC_SPL_MEMCPY_W8(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1));
+    memcpy(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1));
 #else
     int i;
     int samples=len>>1;
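Note: both big-endian paths swap signal-processing-library copy macros for plain memcpy, which also removes the header dependency dropped a few hunks above. The only subtlety is units: the W16 macro presumably counted 16-bit words while memcpy counts bytes, hence the sizeof scaling. A sketch of the assumed equivalence:

    // Assumption: WEBRTC_SPL_MEMCPY_W16(dst, src, len) copied len 16-bit
    // samples; the byte-oriented replacement must scale by the element size.
    int16_t dst[160], src[160];
    int len = 160;                            // length in 16-bit samples
    memcpy(dst, src, len * sizeof(int16_t));  // len samples = 2*len bytes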
@@ -44,12 +44,6 @@
         'acm_common_defs.h',
         'acm_dtmf_playout.cc',
         'acm_dtmf_playout.h',
-        'acm_g722.cc',
-        'acm_g722.h',
-        'acm_g7221.cc',
-        'acm_g7221.h',
-        'acm_g7221c.cc',
-        'acm_g7221c.h',
         'acm_g729.cc',
         'acm_g729.h',
         'acm_g7291.cc',
@@ -58,11 +52,6 @@
         'acm_generic_codec.h',
         'acm_gsmfr.cc',
         'acm_gsmfr.h',
-        'acm_ilbc.cc',
-        'acm_ilbc.h',
-        'acm_isac.cc',
-        'acm_isac.h',
-        'acm_isac_macros.h',
         'acm_opus.cc',
         'acm_opus.h',
         'acm_speex.cc',
@@ -10,12 +10,6 @@
   'variables': {
     'audio_coding_dependencies': [
       'CNG',
-      'G711',
-      'G722',
-      'iLBC',
-      'iSAC',
-      'iSACFix',
-      'PCM16B',
       'NetEq',
       '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
       '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
@@ -25,6 +19,57 @@
     ['include_opus==1', {
       'audio_coding_dependencies': ['webrtc_opus',],
       'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
+      'audio_coding_sources': [
+        'acm_opus.cc',
+        'acm_opus.h',
+      ],
+    }],
+    ['include_g711==1', {
+      'audio_coding_dependencies': ['G711',],
+      'audio_coding_defines': ['WEBRTC_CODEC_G711',],
+      'audio_coding_sources': [
+        'acm_pcma.cc',
+        'acm_pcma.h',
+        'acm_pcmu.cc',
+        'acm_pcmu.h',
+      ],
+    }],
+    ['include_g722==1', {
+      'audio_coding_dependencies': ['G722',],
+      'audio_coding_defines': ['WEBRTC_CODEC_G722',],
+      'audio_coding_sources': [
+        'acm_g722.cc',
+        'acm_g722.h',
+        'acm_g7221.cc',
+        'acm_g7221.h',
+        'acm_g7221c.cc',
+        'acm_g7221c.h',
+      ],
+    }],
+    ['include_ilbc==1', {
+      'audio_coding_dependencies': ['iLBC',],
+      'audio_coding_defines': ['WEBRTC_CODEC_ILBC',],
+      'audio_coding_sources': [
+        'acm_ilbc.cc',
+        'acm_ilbc.h',
+      ],
+    }],
+    ['include_isac==1', {
+      'audio_coding_dependencies': ['iSAC', 'iSACFix',],
+      'audio_coding_defines': ['WEBRTC_CODEC_ISAC', 'WEBRTC_CODEC_ISACFX',],
+      'audio_coding_sources': [
+        'acm_isac.cc',
+        'acm_isac.h',
+        'acm_isac_macros.h',
+      ],
+    }],
+    ['include_pcm16b==1', {
+      'audio_coding_dependencies': ['PCM16B',],
+      'audio_coding_defines': ['WEBRTC_CODEC_PCM16',],
+      'audio_coding_sources': [
+        'acm_pcm16b.cc',
+        'acm_pcm16b.h',
+      ],
     }],
   ],
 },
@@ -50,14 +95,9 @@
         ],
       },
       'sources': [
+        # '<@(audio_coding_sources)',
         '../interface/audio_coding_module.h',
         '../interface/audio_coding_module_typedefs.h',
-        'acm_amr.cc',
-        'acm_amr.h',
-        'acm_amrwb.cc',
-        'acm_amrwb.h',
-        'acm_celt.cc',
-        'acm_celt.h',
         'acm_cng.cc',
         'acm_cng.h',
         'acm_codec_database.cc',
@@ -66,31 +106,13 @@
         'acm_dtmf_detection.h',
         'acm_dtmf_playout.cc',
         'acm_dtmf_playout.h',
-        'acm_g722.cc',
-        'acm_g722.h',
-        'acm_g7221.cc',
-        'acm_g7221.h',
-        'acm_g7221c.cc',
-        'acm_g7221c.h',
-        'acm_g729.cc',
-        'acm_g729.h',
-        'acm_g7291.cc',
-        'acm_g7291.h',
         'acm_generic_codec.cc',
         'acm_generic_codec.h',
-        'acm_gsmfr.cc',
-        'acm_gsmfr.h',
-        'acm_ilbc.cc',
-        'acm_ilbc.h',
-        'acm_isac.cc',
-        'acm_isac.h',
-        'acm_isac_macros.h',
         'acm_neteq.cc',
         'acm_neteq.h',
+        # cheat until I get audio_coding_sources to work
         'acm_opus.cc',
         'acm_opus.h',
-        'acm_speex.cc',
-        'acm_speex.h',
         'acm_pcm16b.cc',
         'acm_pcm16b.h',
         'acm_pcma.cc',
@@ -69,6 +69,8 @@
  * decoded signal is at 32 kHz.
  * NETEQ_ISAC_FB_CODEC Enable iSAC-FB
  *
+ * NETEQ_OPUS_CODEC Enable Opus
+ *
  * NETEQ_G722_CODEC Enable G.722
  *
  * NETEQ_G729_CODEC Enable G.729
@@ -321,6 +323,7 @@
 #define NETEQ_PCM16B_CODEC
 #define NETEQ_G711_CODEC
 #define NETEQ_ILBC_CODEC
+#define NETEQ_OPUS_CODEC
 #define NETEQ_G729_CODEC
 #define NETEQ_G726_CODEC
 #define NETEQ_GSMFR_CODEC
@@ -329,6 +332,7 @@
 /* Wideband codecs */
 #define NETEQ_WIDEBAND
 #define NETEQ_ISAC_CODEC
+/*#define NETEQ_OPUS_CODEC define only once */
 #define NETEQ_G722_CODEC
 #define NETEQ_G722_1_CODEC
 #define NETEQ_G729_1_CODEC
@@ -337,13 +341,18 @@

 /* Super wideband 32kHz codecs */
 #define NETEQ_ISAC_SWB_CODEC
+/*#define NETEQ_OPUS_CODEC*/
 #define NETEQ_32KHZ_WIDEBAND
 #define NETEQ_G722_1C_CODEC
 #define NETEQ_CELT_CODEC
+/*#define NETEQ_OPUS_CODEC*/
+
+/* hack in 48 kHz support */
+#define NETEQ_48KHZ_WIDEBAND

 /* Super wideband 48kHz codecs */
 #define NETEQ_48KHZ_WIDEBAND
-#define NETEQ_OPUS_CODEC
+/*#define NETEQ_OPUS_CODEC*/
 #define NETEQ_ISAC_FB
 #endif

@@ -678,6 +678,11 @@ int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
             codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */
             codecBuffers = 30; /* Replicating the value for PCMu/a */
         }
+        else if (codecID[i] == kDecoderOpus)
+        {
+            codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */
+            codecBuffers = 30; /* ?? Codec supports down to 2.5-60 ms frames */
+        }
         else if ((codecID[i] == kDecoderPCM16B) ||
                  (codecID[i] == kDecoderPCM16B_2ch))
         {
@@ -26,12 +26,13 @@
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"

+#include "AndroidJNIWrapper.h"
+
 namespace webrtc
 {
 // TODO(leozwang): Refactor jni and the following global variables, a
 // good example is jni_helper in Chromium.
 JavaVM* AudioDeviceAndroidJni::globalJvm = NULL;
-JNIEnv* AudioDeviceAndroidJni::globalJNIEnv = NULL;
 jobject AudioDeviceAndroidJni::globalContext = NULL;
 jclass AudioDeviceAndroidJni::globalScClass = NULL;
@ -45,62 +46,62 @@ jclass AudioDeviceAndroidJni::globalScClass = NULL;
|
|||||||
|
|
||||||
int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
|
int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
|
||||||
void* javaVM,
|
void* javaVM,
|
||||||
void* env,
|
|
||||||
void* context) {
|
void* context) {
|
||||||
__android_log_print(ANDROID_LOG_DEBUG, "WEBRTC", "JNI:%s", __FUNCTION__);
|
return SetAndroidAudioDeviceObjects(javaVM, NULL, context);
|
||||||
|
}
|
||||||
|
|
||||||
|
int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
|
||||||
|
void* javaVM,
|
||||||
|
void* null_env,
|
||||||
|
void* context) {
|
||||||
|
WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, -1,
|
||||||
|
"%s called", __FUNCTION__);
|
||||||
|
|
||||||
// TODO(leozwang): Make this function thread-safe.
|
// TODO(leozwang): Make this function thread-safe.
|
||||||
globalJvm = reinterpret_cast<JavaVM*>(javaVM);
|
globalJvm = reinterpret_cast<JavaVM*>(javaVM);
|
||||||
|
|
||||||
if (env) {
|
JNIEnv* env = NULL;
|
||||||
globalJNIEnv = reinterpret_cast<JNIEnv*>(env);
|
|
||||||
|
// Check if we already got a reference
|
||||||
|
if (globalJvm && !globalScClass) {
|
||||||
|
if (globalJvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
|
||||||
|
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1,
|
||||||
|
"%s: could not get Java environment", __FUNCTION__);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
globalJvm->AttachCurrentThread(&env, NULL);
|
||||||
|
|
||||||
// Get java class type (note path to class packet).
|
// Get java class type (note path to class packet).
|
||||||
jclass javaScClassLocal = globalJNIEnv->FindClass(
|
globalScClass = jsjni_GetGlobalClassRef(AudioCaptureClass);
|
||||||
"org/webrtc/voiceengine/WebRTCAudioDevice");
|
if (!globalScClass) {
|
||||||
if (!javaScClassLocal) {
|
|
||||||
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
|
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
|
||||||
"%s: could not find java class", __FUNCTION__);
|
"%s: could not find java class", __FUNCTION__);
|
||||||
return -1; // exception thrown
|
return -1; // exception thrown
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create a global reference to the class (to tell JNI that we are
|
globalContext = env->NewGlobalRef(
|
||||||
// referencing it after this function has returned).
|
|
||||||
globalScClass = reinterpret_cast<jclass> (
|
|
||||||
globalJNIEnv->NewGlobalRef(javaScClassLocal));
|
|
||||||
if (!globalScClass) {
|
|
||||||
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
|
|
||||||
"%s: could not create reference", __FUNCTION__);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
globalContext = globalJNIEnv->NewGlobalRef(
|
|
||||||
reinterpret_cast<jobject>(context));
|
reinterpret_cast<jobject>(context));
|
||||||
if (!globalContext) {
|
if (!globalContext) {
|
||||||
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
|
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
|
||||||
"%s: could not create context reference", __FUNCTION__);
|
"%s: could not create context reference", __FUNCTION__);
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Delete local class ref, we only use the global ref
|
|
||||||
globalJNIEnv->DeleteLocalRef(javaScClassLocal);
|
|
||||||
}
|
}
|
||||||
else { // User is resetting the env variable
|
else { // User is resetting the env variable
|
||||||
WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
|
WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
|
||||||
"%s: env is NULL, assuming deinit", __FUNCTION__);
|
"%s: env is NULL, assuming deinit", __FUNCTION__);
|
||||||
|
|
||||||
if (!globalJNIEnv) {
|
if (!env) {
|
||||||
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
|
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
|
||||||
"%s: saved env already NULL", __FUNCTION__);
|
"%s: saved env already NULL", __FUNCTION__);
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
globalJNIEnv->DeleteGlobalRef(globalScClass);
|
env->DeleteGlobalRef(globalScClass);
|
||||||
globalScClass = reinterpret_cast<jclass>(NULL);
|
globalScClass = reinterpret_cast<jclass>(NULL);
|
||||||
|
|
||||||
globalJNIEnv->DeleteGlobalRef(globalContext);
|
env->DeleteGlobalRef(globalContext);
|
||||||
globalContext = reinterpret_cast<jobject>(NULL);
|
globalContext = reinterpret_cast<jobject>(NULL);
|
||||||
|
|
||||||
globalJNIEnv = reinterpret_cast<JNIEnv*>(NULL);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return 0;
|
return 0;
|
||||||
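Annotation: the rewritten setter above no longer trusts a caller-supplied JNIEnv. JNIEnv pointers are thread-local and must not be cached across threads, so the code derives one from the cached JavaVM and pins the Java context with a global reference. A minimal standalone sketch of that pattern (illustrative only; jsjni_GetGlobalClassRef above is Mozilla's AndroidJNIWrapper helper, everything below is standard JNI):

    #include <jni.h>

    // Acquire a JNIEnv for the current thread from a cached JavaVM and
    // pin a jobject beyond the lifetime of the local frame.
    static jobject PinContext(JavaVM* jvm, jobject local_context) {
      JNIEnv* env = NULL;
      // GetEnv succeeds only if this thread is already attached...
      if (jvm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4) != JNI_OK) {
        // ...otherwise attach it (pair with DetachCurrentThread when done).
        if (jvm->AttachCurrentThread(&env, NULL) != JNI_OK)
          return NULL;
      }
      // Local refs die when the native frame returns; a global ref keeps the
      // object alive until DeleteGlobalRef, as the patch does on deinit.
      return env->NewGlobalRef(local_context);
    }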
@@ -140,8 +141,8 @@ AudioDeviceAndroidJni::AudioDeviceAndroidJni(const int32_t id) :
     _playError(0), _recWarning(0), _recError(0), _delayPlayout(0),
     _delayRecording(0),
     _AGC(false),
-    _samplingFreqIn((N_REC_SAMPLES_PER_SEC/1000)),
-    _samplingFreqOut((N_PLAY_SAMPLES_PER_SEC/1000)),
+    _samplingFreqIn((N_REC_SAMPLES_PER_SEC)),
+    _samplingFreqOut((N_PLAY_SAMPLES_PER_SEC)),
     _maxSpeakerVolume(0),
     _loudSpeakerOn(false),
     _recAudioSource(1), // 1 is AudioSource.MIC which is our default
@@ -1385,17 +1386,10 @@ int32_t AudioDeviceAndroidJni::InitPlayout()
     // get the method ID
     jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback",
                                                 "(I)I");
 
-    int samplingFreq = 44100;
-    if (_samplingFreqOut != 44)
-    {
-        samplingFreq = _samplingFreqOut * 1000;
-    }
-
     int retVal = -1;
 
     // Call java sc object method
-    jint res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq);
+    jint res = env->CallIntMethod(_javaScObj, initPlaybackID, _samplingFreqOut);
     if (res < 0)
     {
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
@@ -1404,7 +1398,7 @@ int32_t AudioDeviceAndroidJni::InitPlayout()
     else
     {
         // Set the audio device buffer sampling rate
-        _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut * 1000);
+        _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut);
         _playIsInitialized = true;
         retVal = 0;
     }
@@ -1490,18 +1484,11 @@ int32_t AudioDeviceAndroidJni::InitRecording()
     // get the method ID
     jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording",
                                                  "(II)I");
 
-    int samplingFreq = 44100;
-    if (_samplingFreqIn != 44)
-    {
-        samplingFreq = _samplingFreqIn * 1000;
-    }
-
     int retVal = -1;
 
     // call java sc object method
     jint res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource,
-                                  samplingFreq);
+                                  _samplingFreqIn);
     if (res < 0)
     {
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
@@ -1510,10 +1497,10 @@ int32_t AudioDeviceAndroidJni::InitRecording()
     else
     {
         // Set the audio device buffer sampling rate
-        _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn * 1000);
+        _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn);
 
         // the init rec function returns a fixed delay
-        _delayRecording = res / _samplingFreqIn;
+        _delayRecording = (res * 1000) / _samplingFreqIn;
 
         _recIsInitialized = true;
         retVal = 0;
@@ -2031,14 +2018,7 @@ int32_t AudioDeviceAndroidJni::SetRecordingSampleRate(
     }
 
     // set the recording sample rate to use
-    if (samplesPerSec == 44100)
-    {
-        _samplingFreqIn = 44;
-    }
-    else
-    {
-        _samplingFreqIn = samplesPerSec / 1000;
-    }
+    _samplingFreqIn = samplesPerSec;
 
     // Update the AudioDeviceBuffer
     _ptrAudioBuffer->SetRecordingSampleRate(samplesPerSec);
@@ -2062,14 +2042,7 @@ int32_t AudioDeviceAndroidJni::SetPlayoutSampleRate(
     }
 
     // set the playout sample rate to use
-    if (samplesPerSec == 44100)
-    {
-        _samplingFreqOut = 44;
-    }
-    else
-    {
-        _samplingFreqOut = samplesPerSec / 1000;
-    }
+    _samplingFreqOut = samplesPerSec;
 
     // Update the AudioDeviceBuffer
     _ptrAudioBuffer->SetPlayoutSampleRate(samplesPerSec);
@@ -2211,7 +2184,7 @@ int32_t AudioDeviceAndroidJni::InitJavaResources()
     }
 
     WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
-                 "construct object", __FUNCTION__);
+                 "%s: construct object", __FUNCTION__);
 
     // construct the object
     jobject javaScObjLocal = env->NewObject(_javaScClass, cid);
@@ -2423,11 +2396,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate()
     if (_samplingFreqIn > 0)
     {
         // read the configured sampling rate
-        samplingFreq = 44100;
-        if (_samplingFreqIn != 44)
-        {
-            samplingFreq = _samplingFreqIn * 1000;
-        }
+        samplingFreq = _samplingFreqIn;
         WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                      "  Trying configured recording sampling rate %d",
                      samplingFreq);
@@ -2468,14 +2437,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate()
     }
 
     // set the recording sample rate to use
-    if (samplingFreq == 44100)
-    {
-        _samplingFreqIn = 44;
-    }
-    else
-    {
-        _samplingFreqIn = samplingFreq / 1000;
-    }
+    _samplingFreqIn = samplingFreq;
 
     WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                  "Recording sample rate set to (%d)", _samplingFreqIn);
@@ -2499,11 +2461,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate()
     if (_samplingFreqOut > 0)
     {
         // read the configured sampling rate
-        samplingFreq = 44100;
-        if (_samplingFreqOut != 44)
-        {
-            samplingFreq = _samplingFreqOut * 1000;
-        }
+        samplingFreq = _samplingFreqOut;
         WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                      "  Trying configured playback sampling rate %d",
                      samplingFreq);
@@ -2557,15 +2515,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate()
     }
 
     // set the playback sample rate to use
-    if (samplingFreq == 44100)
-    {
-        _samplingFreqOut = 44;
-    }
-    else
-    {
-        _samplingFreqOut = samplingFreq / 1000;
-    }
+    _samplingFreqOut = samplingFreq;
 
     WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                  "Playback sample rate set to (%d)", _samplingFreqOut);
 
@@ -2678,7 +2628,7 @@ bool AudioDeviceAndroidJni::PlayThreadProcess()
     if (_playing)
     {
         int8_t playBuffer[2 * 480]; // Max 10 ms @ 48 kHz / 16 bit
-        uint32_t samplesToPlay = _samplingFreqOut * 10;
+        uint32_t samplesToPlay = _samplingFreqOut / 100;
 
         // ask for new PCM data to be played out using the AudioDeviceBuffer
         // ensure that this callback is executed without taking the
@@ -2723,7 +2673,7 @@ bool AudioDeviceAndroidJni::PlayThreadProcess()
     else if (res > 0)
     {
         // we are not recording and have got a delay value from playback
-        _delayPlayout = res / _samplingFreqOut;
+        _delayPlayout = (res * 1000) / _samplingFreqOut;
     }
     // If 0 is returned we are recording and then play delay is updated
     // in RecordProcess
@@ -2821,7 +2771,7 @@ bool AudioDeviceAndroidJni::RecThreadProcess()
 
     if (_recording)
     {
-        uint32_t samplesToRec = _samplingFreqIn * 10;
+        uint32_t samplesToRec = _samplingFreqIn / 100;
 
         // Call java sc object method to record data to direct buffer
         // Will block until data has been recorded (see java sc class),
@@ -2838,7 +2788,7 @@ bool AudioDeviceAndroidJni::RecThreadProcess()
     }
     else
     {
-        _delayPlayout = playDelayInSamples / _samplingFreqOut;
+        _delayPlayout = (playDelayInSamples * 1000) / _samplingFreqOut;
     }
     Lock();
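Annotation: the recurring change in the hunks above is a unit switch. _samplingFreqIn/_samplingFreqOut now hold Hz (16000, 44100) instead of kHz (16, 44), so a 10 ms buffer becomes freq / 100 samples and sample-count delays convert to milliseconds as (samples * 1000) / freq, multiplying before dividing to avoid integer truncation. A small self-contained sketch of the arithmetic (hypothetical helper names, for illustration):

    #include <cstdint>
    #include <cassert>

    // Samples needed for one 10 ms block at a rate given in Hz.
    static uint32_t SamplesPer10Ms(uint32_t rate_hz) {
      return rate_hz / 100;                     // 44100 Hz -> 441 samples
    }

    // Convert a delay measured in samples to milliseconds.
    static uint32_t DelayMs(uint32_t delay_samples, uint32_t rate_hz) {
      return (delay_samples * 1000) / rate_hz;  // 441 samples @ 44100 Hz -> 10 ms
    }

    int main() {
      assert(SamplesPer10Ms(44100) == 441);
      assert(DelayMs(882, 44100) == 20);
      return 0;
    }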
@@ -20,19 +20,12 @@
 
 #include <jni.h> // For accessing AudioDeviceAndroid java class
 
+#define AudioCaptureClass "org/webrtc/voiceengine/WebRTCAudioDevice"
+
 namespace webrtc
 {
 class EventWrapper;
 
-const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
-const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
-
-const uint32_t N_REC_CHANNELS = 1; // default is mono recording
-const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout
-
-const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
-
-
 class ThreadWrapper;
 
 class AudioDeviceAndroidJni : public AudioDeviceGeneric {
@@ -40,6 +33,9 @@ class AudioDeviceAndroidJni : public AudioDeviceGeneric {
   AudioDeviceAndroidJni(const int32_t id);
   ~AudioDeviceAndroidJni();
 
+  static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
+                                              void* context);
+
   static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
                                               void* env,
                                               void* context);
@@ -158,6 +154,14 @@ class AudioDeviceAndroidJni : public AudioDeviceGeneric {
   virtual int32_t SetLoudspeakerStatus(bool enable);
   virtual int32_t GetLoudspeakerStatus(bool& enable) const;
 
+  static const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
+  static const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
+
+  static const uint32_t N_REC_CHANNELS = 1; // default is mono recording
+  static const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout
+
+  static const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
+
  private:
   // Lock
   void Lock() {
@@ -36,6 +36,7 @@ class AudioManagerJni {
   // It has to be called for this class' APIs to be successful. Calling
   // ClearAndroidAudioDeviceObjects will prevent this class' APIs to be called
   // successfully if SetAndroidAudioDeviceObjects is not called after it.
+  static void SetAndroidAudioDeviceObjects(void* jvm, void* context);
   static void SetAndroidAudioDeviceObjects(void* jvm, void* env,
                                            void* context);
   // This function must be called when the AudioManagerJni class is no
@@ -11,6 +11,7 @@
 #include "webrtc/modules/audio_device/android/opensles_input.h"
 
 #include <assert.h>
+#include <dlfcn.h>
 
 #include "webrtc/modules/audio_device/android/single_rw_fifo.h"
 #include "webrtc/modules/audio_device/audio_device_buffer.h"
@@ -65,7 +66,8 @@ OpenSlesInput::OpenSlesInput(
       active_queue_(0),
       rec_sampling_rate_(0),
       agc_enabled_(false),
-      recording_delay_(0) {
+      recording_delay_(0),
+      opensles_lib_(NULL) {
 }
 
 OpenSlesInput::~OpenSlesInput() {
@@ -74,15 +76,41 @@ OpenSlesInput::~OpenSlesInput() {
 int32_t OpenSlesInput::Init() {
   assert(!initialized_);
 
+  /* Try to dynamically open the OpenSLES library */
+  opensles_lib_ = dlopen("libOpenSLES.so", RTLD_LAZY);
+  if (!opensles_lib_) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,
+                 "  failed to dlopen OpenSLES library");
+    return -1;
+  }
+
+  f_slCreateEngine = (slCreateEngine_t)dlsym(opensles_lib_, "slCreateEngine");
+  SL_IID_ENGINE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ENGINE");
+  SL_IID_BUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_BUFFERQUEUE");
+  SL_IID_ANDROIDCONFIGURATION_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDCONFIGURATION");
+  SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
+  SL_IID_RECORD_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_RECORD");
+
+  if (!f_slCreateEngine ||
+      !SL_IID_ENGINE_ ||
+      !SL_IID_BUFFERQUEUE_ ||
+      !SL_IID_ANDROIDCONFIGURATION_ ||
+      !SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ ||
+      !SL_IID_RECORD_) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,
+                 "  failed to find OpenSLES function");
+    return -1;
+  }
+
   // Set up OpenSL engine.
-  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
+  OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0,
                                           NULL, NULL),
                            -1);
   OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                     SL_BOOLEAN_FALSE),
                            -1);
   OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
-                                                         SL_IID_ENGINE,
+                                                         SL_IID_ENGINE_,
                                                          &sles_engine_itf_),
                            -1);
 
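Annotation: Init() above now binds slCreateEngine and the SL_IID_* interface IDs at runtime rather than linking against libOpenSLES.so, so one binary can load on systems where OpenSL ES is absent. The late-binding idiom, reduced to a single function symbol and a single data symbol (a sketch under the same libOpenSLES.so assumption; error reporting trimmed):

    #include <dlfcn.h>
    #include <SLES/OpenSLES.h>

    typedef SLresult (*slCreateEngine_t)(SLObjectItf*, SLuint32,
                                         const SLEngineOption*, SLuint32,
                                         const SLInterfaceID*, const SLboolean*);

    bool BindOpenSLES(void** lib_out, slCreateEngine_t* create_out,
                      SLInterfaceID* engine_iid_out) {
      // RTLD_LAZY defers symbol resolution until first use.
      void* lib = dlopen("libOpenSLES.so", RTLD_LAZY);
      if (!lib)
        return false;
      // Function symbol: cast the dlsym result to the function-pointer type.
      *create_out = (slCreateEngine_t)dlsym(lib, "slCreateEngine");
      // Data symbol: SL_IID_ENGINE is an SLInterfaceID object, so dereference
      // the pointer dlsym returns instead of casting it to a function.
      void* iid = dlsym(lib, "SL_IID_ENGINE");
      if (!*create_out || !iid) {
        dlclose(lib);
        return false;
      }
      *engine_iid_out = *(SLInterfaceID*)iid;
      *lib_out = lib;  // caller dlclose()s when done, as Terminate() does
      return true;
    }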
@@ -101,6 +129,7 @@ int32_t OpenSlesInput::Terminate() {
   initialized_ = false;
   mic_initialized_ = false;
   rec_initialized_ = false;
+  dlclose(opensles_lib_);
   return 0;
 }
 
@@ -270,8 +299,12 @@ void OpenSlesInput::UpdateRecordingDelay() {
 }
 
 void OpenSlesInput::UpdateSampleRate() {
+#if !defined(WEBRTC_GONK)
   rec_sampling_rate_ = audio_manager_.low_latency_supported() ?
       audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
+#else
+  rec_sampling_rate_ = kDefaultSampleRate;
+#endif
 }
 
 void OpenSlesInput::CalculateNumFifoBuffersNeeded() {
@@ -345,7 +378,7 @@ bool OpenSlesInput::CreateAudioRecorder() {
   // Note the interfaces still need to be initialized. This only tells OpenSl
   // that the interfaces will be needed at some point.
   const SLInterfaceID id[kNumInterfaces] = {
-    SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
+    SL_IID_ANDROIDSIMPLEBUFFERQUEUE_, SL_IID_ANDROIDCONFIGURATION_ };
   const SLboolean req[kNumInterfaces] = {
     SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
   OPENSL_RETURN_ON_FAILURE(
@@ -363,13 +396,13 @@ bool OpenSlesInput::CreateAudioRecorder() {
                                SL_BOOLEAN_FALSE),
       false);
   OPENSL_RETURN_ON_FAILURE(
-      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD,
+      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD_,
                                       static_cast<void*>(&sles_recorder_itf_)),
      false);
   OPENSL_RETURN_ON_FAILURE(
       (*sles_recorder_)->GetInterface(
           sles_recorder_,
-          SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+          SL_IID_ANDROIDSIMPLEBUFFERQUEUE_,
           static_cast<void*>(&sles_recorder_sbq_itf_)),
       false);
   return true;
@@ -15,7 +15,9 @@
 #include <SLES/OpenSLES_Android.h>
 #include <SLES/OpenSLES_AndroidConfiguration.h>
 
+#if !defined(WEBRTC_GONK)
 #include "webrtc/modules/audio_device/android/audio_manager_jni.h"
+#endif
 #include "webrtc/modules/audio_device/android/low_latency_event.h"
 #include "webrtc/modules/audio_device/android/opensles_common.h"
 #include "webrtc/modules/audio_device/include/audio_device.h"
@@ -165,8 +167,10 @@ class OpenSlesInput {
   // Thread-compatible.
   bool CbThreadImpl();
 
+#if !defined(WEBRTC_GONK)
   // Java API handle
   AudioManagerJni audio_manager_;
+#endif
 
   int id_;
   webrtc_opensl::PlayoutDelayProvider* delay_provider_;
@@ -212,6 +216,21 @@ class OpenSlesInput {
 
   // Audio status
   uint16_t recording_delay_;
+
+  // dlopen for OpenSLES
+  void *opensles_lib_;
+  typedef SLresult (*slCreateEngine_t)(SLObjectItf *,
+                                       SLuint32,
+                                       const SLEngineOption *,
+                                       SLuint32,
+                                       const SLInterfaceID *,
+                                       const SLboolean *);
+  slCreateEngine_t f_slCreateEngine;
+  SLInterfaceID SL_IID_ENGINE_;
+  SLInterfaceID SL_IID_BUFFERQUEUE_;
+  SLInterfaceID SL_IID_ANDROIDCONFIGURATION_;
+  SLInterfaceID SL_IID_ANDROIDSIMPLEBUFFERQUEUE_;
+  SLInterfaceID SL_IID_RECORD_;
 };
 
 } // namespace webrtc
@@ -11,6 +11,7 @@
 #include "webrtc/modules/audio_device/android/opensles_output.h"
 
 #include <assert.h>
+#include <dlfcn.h>
 
 #include "webrtc/modules/audio_device/android/fine_audio_buffer.h"
 #include "webrtc/modules/audio_device/android/single_rw_fifo.h"
@@ -65,7 +66,8 @@ OpenSlesOutput::OpenSlesOutput(const int32_t id)
       speaker_sampling_rate_(kDefaultSampleRate),
       buffer_size_samples_(0),
       buffer_size_bytes_(0),
-      playout_delay_(0) {
+      playout_delay_(0),
+      opensles_lib_(NULL) {
 }
 
 OpenSlesOutput::~OpenSlesOutput() {
@@ -74,15 +76,43 @@ OpenSlesOutput::~OpenSlesOutput() {
 int32_t OpenSlesOutput::Init() {
   assert(!initialized_);
 
+  /* Try to dynamically open the OpenSLES library */
+  opensles_lib_ = dlopen("libOpenSLES.so", RTLD_LAZY);
+  if (!opensles_lib_) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,
+                 "  failed to dlopen OpenSLES library");
+    return -1;
+  }
+
+  f_slCreateEngine = (slCreateEngine_t)dlsym(opensles_lib_, "slCreateEngine");
+  SL_IID_ENGINE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ENGINE");
+  SL_IID_BUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_BUFFERQUEUE");
+  SL_IID_ANDROIDCONFIGURATION_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDCONFIGURATION");
+  SL_IID_PLAY_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_PLAY");
+  SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
+  SL_IID_VOLUME_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_VOLUME");
+
+  if (!f_slCreateEngine ||
+      !SL_IID_ENGINE_ ||
+      !SL_IID_BUFFERQUEUE_ ||
+      !SL_IID_ANDROIDCONFIGURATION_ ||
+      !SL_IID_PLAY_ ||
+      !SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ ||
+      !SL_IID_VOLUME_) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,
+                 "  failed to find OpenSLES function");
+    return -1;
+  }
+
   // Set up OpenSl engine.
-  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
+  OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0,
                                           NULL, NULL),
                            -1);
   OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                     SL_BOOLEAN_FALSE),
                            -1);
   OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
-                                                         SL_IID_ENGINE,
+                                                         SL_IID_ENGINE_,
                                                          &sles_engine_itf_),
                            -1);
   // Set up OpenSl output mix.
@@ -114,6 +144,7 @@ int32_t OpenSlesOutput::Terminate() {
   initialized_ = false;
   speaker_initialized_ = false;
   play_initialized_ = false;
+  dlclose(opensles_lib_);
   return 0;
 }
 
@@ -302,6 +333,7 @@ void OpenSlesOutput::UpdatePlayoutDelay() {
 }
 
 bool OpenSlesOutput::SetLowLatency() {
+#if !defined(WEBRTC_GONK)
   if (!audio_manager_.low_latency_supported()) {
     return false;
   }
@@ -310,6 +342,9 @@ bool OpenSlesOutput::SetLowLatency() {
   speaker_sampling_rate_ = audio_manager_.native_output_sample_rate();
   assert(speaker_sampling_rate_ > 0);
   return true;
+#else
+  return false;
+#endif
 }
 
 void OpenSlesOutput::CalculateNumFifoBuffersNeeded() {
@@ -395,7 +430,7 @@ bool OpenSlesOutput::CreateAudioPlayer() {
   // Note the interfaces still need to be initialized. This only tells OpenSl
   // that the interfaces will be needed at some point.
   SLInterfaceID ids[kNumInterfaces] = {
-    SL_IID_BUFFERQUEUE, SL_IID_VOLUME, SL_IID_ANDROIDCONFIGURATION };
+    SL_IID_BUFFERQUEUE_, SL_IID_VOLUME_, SL_IID_ANDROIDCONFIGURATION_ };
   SLboolean req[kNumInterfaces] = {
     SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
   OPENSL_RETURN_ON_FAILURE(
@@ -408,11 +443,11 @@ bool OpenSlesOutput::CreateAudioPlayer() {
                              SL_BOOLEAN_FALSE),
       false);
   OPENSL_RETURN_ON_FAILURE(
-      (*sles_player_)->GetInterface(sles_player_, SL_IID_PLAY,
+      (*sles_player_)->GetInterface(sles_player_, SL_IID_PLAY_,
                                     &sles_player_itf_),
       false);
   OPENSL_RETURN_ON_FAILURE(
-      (*sles_player_)->GetInterface(sles_player_, SL_IID_BUFFERQUEUE,
+      (*sles_player_)->GetInterface(sles_player_, SL_IID_BUFFERQUEUE_,
                                     &sles_player_sbq_itf_),
       false);
   return true;
@@ -15,7 +15,9 @@
 #include <SLES/OpenSLES_Android.h>
 #include <SLES/OpenSLES_AndroidConfiguration.h>
 
+#if !defined(WEBRTC_GONK)
 #include "webrtc/modules/audio_device/android/audio_manager_jni.h"
+#endif
 #include "webrtc/modules/audio_device/android/low_latency_event.h"
 #include "webrtc/modules/audio_device/android/opensles_common.h"
 #include "webrtc/modules/audio_device/include/audio_device_defines.h"
@@ -181,8 +183,10 @@ class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider {
   // Thread-compatible.
   bool CbThreadImpl();
 
+#if !defined(WEBRTC_GONK)
   // Java API handle
   AudioManagerJni audio_manager_;
+#endif
 
   int id_;
   bool initialized_;
@@ -229,6 +233,22 @@ class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider {
 
   // Audio status
   uint16_t playout_delay_;
+
+  // dlopen for OpenSLES
+  void *opensles_lib_;
+  typedef SLresult (*slCreateEngine_t)(SLObjectItf *,
+                                       SLuint32,
+                                       const SLEngineOption *,
+                                       SLuint32,
+                                       const SLInterfaceID *,
+                                       const SLboolean *);
+  slCreateEngine_t f_slCreateEngine;
+  SLInterfaceID SL_IID_ENGINE_;
+  SLInterfaceID SL_IID_BUFFERQUEUE_;
+  SLInterfaceID SL_IID_ANDROIDCONFIGURATION_;
+  SLInterfaceID SL_IID_PLAY_;
+  SLInterfaceID SL_IID_ANDROIDSIMPLEBUFFERQUEUE_;
+  SLInterfaceID SL_IID_VOLUME_;
 };
 
 } // namespace webrtc
@@ -46,11 +46,19 @@
         'dummy/audio_device_utility_dummy.h',
       ],
       'conditions': [
-        ['OS=="linux"', {
+        ['build_with_mozilla==1', {
+          'include_dirs': [
+            '$(DIST)/include',
+          ],
+          'cflags_mozilla': [
+            '$(NSPR_CFLAGS)',
+          ],
+        }],
+        ['OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1', {
           'include_dirs': [
             'linux',
           ],
-        }], # OS==linux
+        }], # OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1
         ['OS=="ios"', {
           'include_dirs': [
             'ios',
@@ -68,9 +76,22 @@
         }],
         ['OS=="android"', {
           'include_dirs': [
+            '$(topsrcdir)/widget/android',
             'android',
           ],
         }], # OS==android
+        ['moz_widget_toolkit_gonk==1', {
+          'include_dirs': [
+            '$(ANDROID_SOURCE)/frameworks/wilhelm/include',
+            '$(ANDROID_SOURCE)/system/media/wilhelm/include',
+            'android',
+          ],
+        }], # moz_widget_toolkit_gonk==1
+        ['enable_android_opensl==1', {
+          'include_dirs': [
+            'opensl',
+          ],
+        }], # enable_android_opensl
         ['include_internal_audio_device==0', {
           'defines': [
             'WEBRTC_DUMMY_AUDIO_BUILD',
@@ -78,14 +99,8 @@
         }],
         ['include_internal_audio_device==1', {
           'sources': [
-            'linux/alsasymboltable_linux.cc',
-            'linux/alsasymboltable_linux.h',
-            'linux/audio_device_alsa_linux.cc',
-            'linux/audio_device_alsa_linux.h',
             'linux/audio_device_utility_linux.cc',
             'linux/audio_device_utility_linux.h',
-            'linux/audio_mixer_manager_alsa_linux.cc',
-            'linux/audio_mixer_manager_alsa_linux.h',
             'linux/latebindingsymboltable_linux.cc',
             'linux/latebindingsymboltable_linux.h',
             'ios/audio_device_ios.cc',
@@ -109,11 +124,17 @@
             'win/audio_device_utility_win.h',
             'win/audio_mixer_manager_win.cc',
             'win/audio_mixer_manager_win.h',
-            'android/audio_device_utility_android.cc',
-            'android/audio_device_utility_android.h',
           ],
           'conditions': [
             ['OS=="android"', {
+              'sources': [
+                'opensl/audio_manager_jni.cc',
+                'opensl/audio_manager_jni.h',
+                'android/audio_device_jni_android.cc',
+                'android/audio_device_jni_android.h',
+              ],
+            }],
+            ['OS=="android" or moz_widget_toolkit_gonk==1', {
               'link_settings': {
                 'libraries': [
                   '-llog',
@@ -123,25 +144,27 @@
               'conditions': [
                 ['enable_android_opensl==1', {
                   'sources': [
-                    'android/audio_device_opensles_android.cc',
-                    'android/audio_device_opensles_android.h',
-                    'android/audio_manager_jni.cc',
-                    'android/audio_manager_jni.h',
-                    'android/fine_audio_buffer.cc',
-                    'android/fine_audio_buffer.h',
-                    'android/low_latency_event_posix.cc',
-                    'android/low_latency_event.h',
-                    'android/opensles_common.cc',
-                    'android/opensles_common.h',
-                    'android/opensles_input.cc',
-                    'android/opensles_input.h',
-                    'android/opensles_output.cc',
-                    'android/opensles_output.h',
-                    'android/single_rw_fifo.cc',
-                    'android/single_rw_fifo.h',
+                    'opensl/audio_device_opensles.cc',
+                    'opensl/audio_device_opensles.h',
+                    'opensl/fine_audio_buffer.cc',
+                    'opensl/fine_audio_buffer.h',
+                    'opensl/low_latency_event_posix.cc',
+                    'opensl/low_latency_event.h',
+                    'opensl/opensles_common.cc',
+                    'opensl/opensles_common.h',
+                    'opensl/opensles_input.cc',
+                    'opensl/opensles_input.h',
+                    'opensl/opensles_output.cc',
+                    'opensl/opensles_output.h',
+                    'opensl/single_rw_fifo.cc',
+                    'opensl/single_rw_fifo.h',
+                    'shared/audio_device_utility_shared.cc',
+                    'shared/audio_device_utility_shared.h',
                   ],
                 }, {
                   'sources': [
+                    'shared/audio_device_utility_shared.cc',
+                    'shared/audio_device_utility_shared.h',
                     'android/audio_device_jni_android.cc',
                     'android/audio_device_jni_android.h',
                   ],
@@ -149,16 +172,32 @@
           ],
         }],
         ['OS=="linux"', {
-          'defines': [
-            'LINUX_ALSA',
-          ],
           'link_settings': {
             'libraries': [
               '-ldl','-lX11',
             ],
           },
-          'conditions': [
+        }],
+        ['include_alsa_audio==1', {
+          'cflags_mozilla': [
+            '$(MOZ_ALSA_CFLAGS)',
+          ],
+          'defines': [
+            'LINUX_ALSA',
+          ],
+          'sources': [
+            'linux/alsasymboltable_linux.cc',
+            'linux/alsasymboltable_linux.h',
+            'linux/audio_device_alsa_linux.cc',
+            'linux/audio_device_alsa_linux.h',
+            'linux/audio_mixer_manager_alsa_linux.cc',
+            'linux/audio_mixer_manager_alsa_linux.h',
+          ],
+        }],
         ['include_pulse_audio==1', {
+          'cflags_mozilla': [
+            '$(MOZ_PULSEAUDIO_CFLAGS)',
+          ],
           'defines': [
             'LINUX_PULSE',
           ],
@@ -171,8 +210,6 @@
             'linux/pulseaudiosymboltable_linux.h',
           ],
         }],
-      ],
-    }],
         ['OS=="mac" or OS=="ios"', {
           'link_settings': {
             'libraries': [
@@ -279,4 +316,3 @@
         }], # include_tests
       ],
 }
-
@@ -16,21 +16,29 @@
 #include <assert.h>
 #include <string.h>
 
-#if defined(_WIN32)
+#if defined(WEBRTC_DUMMY_AUDIO_BUILD)
+    // do not include platform specific headers
+#elif defined(_WIN32)
     #include "audio_device_utility_win.h"
     #include "audio_device_wave_win.h"
  #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
     #include "audio_device_core_win.h"
  #endif
 #elif defined(WEBRTC_ANDROID_OPENSLES)
+    // ANDROID and GONK
     #include <stdlib.h>
+    #include <dlfcn.h>
     #include "audio_device_utility_android.h"
     #include "audio_device_opensles_android.h"
+ #if !defined(WEBRTC_GONK)
+    #include "audio_device_jni_android.h"
+ #endif
 #elif defined(WEBRTC_ANDROID)
+    // GONK only supports opensles; android can use that or jni
     #include <stdlib.h>
     #include "audio_device_utility_android.h"
     #include "audio_device_jni_android.h"
-#elif defined(WEBRTC_LINUX)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
     #include "audio_device_utility_linux.h"
  #if defined(LINUX_ALSA)
     #include "audio_device_alsa_linux.h"
@@ -159,7 +167,7 @@ int32_t AudioDeviceModuleImpl::CheckPlatform()
 #elif defined(WEBRTC_ANDROID)
     platform = kPlatformAndroid;
     WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is ANDROID");
-#elif defined(WEBRTC_LINUX)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
     platform = kPlatformLinux;
     WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is LINUX");
 #elif defined(WEBRTC_IOS)
@@ -259,41 +267,41 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
     // Create the *Android OpenSLES* implementation of the Audio Device
     //
 #if defined(WEBRTC_ANDROID_OPENSLES)
+    // Check if the OpenSLES library is available before going further.
+    void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY);
+    if (opensles_lib) {
+        // That worked, close for now and proceed normally.
+        dlclose(opensles_lib);
     if (audioLayer == kPlatformDefaultAudio)
     {
-        // Create *Android OpenELSE Audio* implementation
+        // Create *Android OpenSLES Audio* implementation
         ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                      "Android OpenSLES Audio APIs will be utilized");
     }
 
-    if (ptrAudioDevice != NULL)
-    {
-        // Create the Android implementation of the Device Utility.
-        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
     }
-    // END #if defined(WEBRTC_ANDROID_OPENSLES)
 
+#if !defined(WEBRTC_GONK)
+    // Fall back to this case if on Android 2.2/OpenSLES not available.
+    if (ptrAudioDevice == NULL) {
     // Create the *Android Java* implementation of the Audio Device
-    //
-#elif defined(WEBRTC_ANDROID)
     if (audioLayer == kPlatformDefaultAudio)
     {
         // Create *Android JNI Audio* implementation
         ptrAudioDevice = new AudioDeviceAndroidJni(Id());
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
     }
+    }
 
     if (ptrAudioDevice != NULL)
     {
         // Create the Android implementation of the Device Utility.
        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
     }
-    // END #if defined(WEBRTC_ANDROID)
+#endif
 
     // Create the *Linux* implementation of the Audio Device
     //
-#elif defined(WEBRTC_LINUX)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
     if ((audioLayer == kLinuxPulseAudio) || (audioLayer == kPlatformDefaultAudio))
     {
 #if defined(LINUX_PULSE)
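Annotation: the factory above now probes for libOpenSLES.so with a throwaway dlopen before committing to the OpenSL ES backend, and (outside Gonk) falls back to the JNI backend when the probe fails. A compact sketch of that probe-then-fallback selection (the Backend enum is hypothetical, for illustration):

    #include <dlfcn.h>

    enum Backend { kBackendOpenSLES, kBackendJni, kBackendNone };

    static Backend PickAudioBackend() {
      // A successful dlopen proves the library exists and links; we do not
      // need to keep it open just to make the decision.
      if (void* lib = dlopen("libOpenSLES.so", RTLD_LAZY)) {
        dlclose(lib);
        return kBackendOpenSLES;
      }
    #if !defined(WEBRTC_GONK)
      return kBackendJni;   // e.g. Android 2.2, where OpenSL ES is absent
    #else
      return kBackendNone;  // Gonk has no JNI audio path to fall back to
    #endif
    }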
@@ -339,7 +347,7 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
         //
         ptrAudioDeviceUtility = new AudioDeviceUtilityLinux(Id());
     }
-#endif  // #if defined(WEBRTC_LINUX)
+#endif  // #if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
 
     // Create the *iPhone* implementation of the Audio Device
     //
@@ -46,7 +46,7 @@ bool AudioDeviceUtility::StringCompare(
 
 }  // namespace webrtc
 
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 
 // ============================================================================
 //                                 Linux & Mac
@@ -109,4 +109,4 @@ bool AudioDeviceUtility::StringCompare(
 
 }  // namespace webrtc
 
-#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
@@ -1332,7 +1332,7 @@ int32_t AudioDeviceIPhone::InitPlayOrRecord() {
     // todo: Add 48 kHz (increase buffer sizes). Other fs?
     if ((playoutDesc.mSampleRate > 44090.0)
         && (playoutDesc.mSampleRate < 44110.0)) {
-        _adbSampFreq = 44000;
+        _adbSampFreq = 44100;
     } else if ((playoutDesc.mSampleRate > 15990.0)
               && (playoutDesc.mSampleRate < 16010.0)) {
         _adbSampFreq = 16000;
@@ -19,8 +19,8 @@
 namespace webrtc {
 class ThreadWrapper;
 
-const uint32_t N_REC_SAMPLES_PER_SEC = 44000;
-const uint32_t N_PLAY_SAMPLES_PER_SEC = 44000;
+const uint32_t N_REC_SAMPLES_PER_SEC = 44100;
+const uint32_t N_PLAY_SAMPLES_PER_SEC = 44100;
 
 const uint32_t N_REC_CHANNELS = 1; // default is mono recording
 const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout
@@ -19,6 +19,13 @@
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
+#include "Latency.h"
+
+#define LOG_FIRST_CAPTURE(x) LogTime(AsyncLatencyLogger::AudioCaptureBase, \
+                                     reinterpret_cast<uint64_t>(x), 0)
+#define LOG_CAPTURE_FRAMES(x, frames) LogLatency(AsyncLatencyLogger::AudioCapture, \
+                                                 reinterpret_cast<uint64_t>(x), frames)
+
 webrtc_adm_linux_alsa::AlsaSymbolTable AlsaSymbolTable;
 
 // Accesses ALSA functions through our late-binding symbol table instead of
@@ -96,6 +103,7 @@ AudioDeviceLinuxALSA::AudioDeviceLinuxALSA(const int32_t id) :
     _playBufType(AudioDeviceModule::kFixedBufferSize),
     _initialized(false),
     _recording(false),
+    _firstRecord(true),
     _playing(false),
     _recIsInitialized(false),
     _playIsInitialized(false),
@@ -986,7 +994,8 @@ int32_t AudioDeviceLinuxALSA::RecordingDeviceName(
         memset(guid, 0, kAdmMaxGuidSize);
     }
 
-    return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize);
+    return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize,
+                          guid, kAdmMaxGuidSize);
 }
 
 int16_t AudioDeviceLinuxALSA::RecordingDevices()
@@ -1448,6 +1457,7 @@ int32_t AudioDeviceLinuxALSA::StartRecording()
     }
     // RECORDING
    const char* threadName = "webrtc_audio_module_capture_thread";
+    _firstRecord = true;
     _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc,
                                                 this,
                                                 kRealtimePriority,
@@ -1634,6 +1644,17 @@ int32_t AudioDeviceLinuxALSA::StartPlayout()
         return -1;
     }
 
+    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "     playout snd_pcm_prepare failed (%s)\n",
+                     LATE(snd_strerror)(errVal));
+        // just log error
+        // if snd_pcm_open fails will return -1
+    }
+
+
     unsigned int threadID(0);
     if (!_ptrThreadPlay->Start(threadID))
     {
@@ -1648,16 +1669,6 @@ int32_t AudioDeviceLinuxALSA::StartPlayout()
     }
     _playThreadID = threadID;
 
-    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
-    if (errVal < 0)
-    {
-        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
-                     "     playout snd_pcm_prepare failed (%s)\n",
-                     LATE(snd_strerror)(errVal));
-        // just log error
-        // if snd_pcm_open fails will return -1
-    }
-
     return 0;
 }
 
@@ -1829,7 +1840,9 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
     const bool playback,
     const int32_t enumDeviceNo,
     char* enumDeviceName,
-    const int32_t ednLen) const
+    const int32_t ednLen,
+    char* enumDeviceId,
+    const int32_t ediLen) const
 {
 
     // Device enumeration based on libjingle implementation
@@ -1868,6 +1881,8 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
          function == FUNC_GET_DEVICE_NAME_FOR_AN_ENUM) && enumDeviceNo == 0)
     {
         strcpy(enumDeviceName, "default");
+        if (enumDeviceId)
+            memset(enumDeviceId, 0, ediLen);
 
         err = LATE(snd_device_name_free_hint)(hints);
         if (err != 0)
@@ -1930,6 +1945,11 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
             // We have found the enum device, copy the name to buffer.
             strncpy(enumDeviceName, desc, ednLen);
             enumDeviceName[ednLen-1] = '\0';
+            if (enumDeviceId)
+            {
+                strncpy(enumDeviceId, name, ediLen);
+                enumDeviceId[ediLen-1] = '\0';
+            }
             keepSearching = false;
             // Replace '\n' with '-'.
             char * pret = strchr(enumDeviceName, '\n'/*0xa*/); //LF
@@ -1942,6 +1962,11 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
             // We have found the enum device, copy the name to buffer.
             strncpy(enumDeviceName, name, ednLen);
             enumDeviceName[ednLen-1] = '\0';
+            if (enumDeviceId)
+            {
+                strncpy(enumDeviceId, name, ediLen);
+                enumDeviceId[ediLen-1] = '\0';
+            }
             keepSearching = false;
         }
 
@@ -2251,6 +2276,11 @@ bool AudioDeviceLinuxALSA::RecThreadProcess()
         { // buf is full
            _recordingFramesLeft = _recordingFramesIn10MS;
 
+            if (_firstRecord) {
+                LOG_FIRST_CAPTURE(this);
+                _firstRecord = false;
+            }
+            LOG_CAPTURE_FRAMES(this, _recordingFramesIn10MS);
             // store the recorded buffer (no action will be taken if the
             // #recorded samples is not a full buffer)
             _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer,
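Annotation: the ALSA capture hunks above wire in Mozilla's AsyncLatencyLogger via the LOG_FIRST_CAPTURE/LOG_CAPTURE_FRAMES macros, with _firstRecord re-armed in StartRecording() so the baseline fires exactly once per session. The one-shot pattern in isolation (a sketch that assumes the macros defined in the patch are in scope):

    // Inside the capture loop: emit a base timestamp on the first full
    // buffer only, then per-buffer frame counts diffed against that base.
    if (_firstRecord) {
        LOG_FIRST_CAPTURE(this);   // baseline "capture started" event
        _firstRecord = false;      // disarm until the next StartRecording()
    }
    LOG_CAPTURE_FRAMES(this, _recordingFramesIn10MS);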
@ -167,7 +167,9 @@ private:
|
|||||||
const bool playback,
|
const bool playback,
|
||||||
const int32_t enumDeviceNo = 0,
|
const int32_t enumDeviceNo = 0,
|
||||||
char* enumDeviceName = NULL,
|
char* enumDeviceName = NULL,
|
||||||
const int32_t ednLen = 0) const;
|
const int32_t ednLen = 0,
|
||||||
|
char* enumDeviceID = NULL,
|
||||||
|
const int32_t ediLen = 0) const;
|
||||||
int32_t ErrorRecovery(int32_t error, snd_pcm_t* deviceHandle);
|
int32_t ErrorRecovery(int32_t error, snd_pcm_t* deviceHandle);
|
||||||
|
|
||||||
private:
|
private:
|
||||||
@ -233,6 +235,7 @@ private:
|
|||||||
private:
|
private:
|
||||||
bool _initialized;
|
bool _initialized;
|
||||||
bool _recording;
|
bool _recording;
|
||||||
|
bool _firstRecord;
|
||||||
bool _playing;
|
bool _playing;
|
||||||
bool _recIsInitialized;
|
bool _recIsInitialized;
|
||||||
bool _playIsInitialized;
|
bool _playIsInitialized;
|
||||||
|
@@ -27,7 +27,7 @@

 #include "webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h"

-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
 #include <dlfcn.h>
 #endif

@@ -37,8 +37,8 @@ using namespace webrtc;
 namespace webrtc_adm_linux {

 inline static const char *GetDllError() {
-#ifdef WEBRTC_LINUX
-  char *err = dlerror();
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
+  const char *err = dlerror();
   if (err) {
     return err;
   } else {
@@ -50,7 +50,7 @@ inline static const char *GetDllError() {
 }

 DllHandle InternalLoadDll(const char dll_name[]) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   DllHandle handle = dlopen(dll_name, RTLD_NOW);
 #else
 #error Not implemented
@@ -63,7 +63,7 @@ DllHandle InternalLoadDll(const char dll_name[]) {
 }

 void InternalUnloadDll(DllHandle handle) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   if (dlclose(handle) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                  "%s", GetDllError());
@@ -76,9 +76,9 @@ void InternalUnloadDll(DllHandle handle) {
 static bool LoadSymbol(DllHandle handle,
                        const char *symbol_name,
                        void **symbol) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   *symbol = dlsym(handle, symbol_name);
-  char *err = dlerror();
+  const char *err = dlerror();
   if (err) {
     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                  "Error loading symbol %s : %d", symbol_name, err);
@@ -101,7 +101,7 @@ bool InternalLoadSymbols(DllHandle handle,
                          int num_symbols,
                          const char *const symbol_names[],
                          void *symbols[]) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   // Clear any old errors.
   dlerror();
 #endif
@@ -42,7 +42,7 @@

 namespace webrtc_adm_linux {

-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
 typedef void *DllHandle;

 const DllHandle kInvalidDllHandle = NULL;
@@ -29,7 +29,11 @@

 namespace webrtc_adm_linux_pulse {

+#if defined(__OpenBSD__) || defined(WEBRTC_GONK)
+LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so")
+#else
 LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so.0")
+#endif
 #define X(sym) \
   LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(PulseAudioSymbolTable, sym)
 PULSE_AUDIO_SYMBOLS_LIST
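For context, the late-binding symbol tables these hunks extend to WEBRTC_BSD all reduce to the same dlopen()/dlsym() pattern: open the library at runtime, resolve each required symbol, and treat a non-NULL dlerror() as failure. An illustrative standalone sketch (not part of the diff; it assumes libpulse is installed and stands one hypothetical entry in for the full PULSE_AUDIO_SYMBOLS_LIST):

    #include <dlfcn.h>
    #include <cstdio>

    int main() {
      // RTLD_NOW resolves all undefined symbols immediately, matching
      // InternalLoadDll() above.
      void* handle = dlopen("libpulse.so.0", RTLD_NOW);
      if (!handle) {
        fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
      }
      dlerror();  // Clear stale error state before dlsym(), as the diff does.
      void* sym = dlsym(handle, "pa_context_new");  // One table entry.
      const char* err = dlerror();  // Non-NULL means the lookup failed.
      if (err)
        fprintf(stderr, "dlsym failed: %s\n", err);
      else
        printf("resolved pa_context_new at %p\n", sym);
      dlclose(handle);
      return 0;
    }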
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_device_opensles_android.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_device_opensles_android.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_manager_jni.cc"
@@ -0,0 +1,6 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_manager_jni.h"
+
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/fine_audio_buffer.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/fine_audio_buffer.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/low_latency_event_posix.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/low_latency_event_posix.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/opensles_common.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/opensles_common.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/opensles_input.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/opensles_input.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/opensles_output.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/opensles_output.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/single_rw_fifo.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/single_rw_fifo.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_device_jni_android.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_device_jni_android.h"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_device_utility_android.cc"
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "../android/audio_device_utility_android.h"
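Each of the twenty files added above is a one-line shim: an MPL header followed by an #include of the corresponding source under ../android. This appears to let the moz.build wiring mentioned in the commit message compile the shared Android/OpenSL ES audio-device code from a second directory without duplicating it; the build-file side of that wiring is outside these hunks.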
@@ -210,7 +210,7 @@ class AudioDeviceAPITest: public testing::Test {
     // Create default implementation instance
     EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
         kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
-#elif defined(WEBRTC_LINUX)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
     EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
         kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
     EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
@@ -1703,7 +1703,7 @@ TEST_F(AudioDeviceAPITest, CPULoad) {

 // TODO(kjellander): Fix flakiness causing failures on Windows.
 // TODO(phoglund): Fix flakiness causing failures on Linux.
-#if !defined(_WIN32) && !defined(WEBRTC_LINUX)
+#if !defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_BSD)
 TEST_F(AudioDeviceAPITest, StartAndStopRawOutputFileRecording) {
   // NOTE: this API is better tested in a functional test
   CheckInitialPlayoutStates();
@@ -1772,7 +1772,7 @@ TEST_F(AudioDeviceAPITest, StartAndStopRawInputFileRecording) {
   // - size of raw_input_not_recording.pcm shall be 0
   // - size of raw_input_not_recording.pcm shall be > 0
 }
-#endif  // !WIN32 && !WEBRTC_LINUX
+#endif  // !WIN32 && !WEBRTC_LINUX && !defined(WEBRTC_BSD)

 TEST_F(AudioDeviceAPITest, RecordingSampleRate) {
   uint32_t sampleRate(0);
@@ -1783,10 +1783,10 @@ TEST_F(AudioDeviceAPITest, RecordingSampleRate) {
   EXPECT_EQ(48000, sampleRate);
 #elif defined(ANDROID)
   TEST_LOG("Recording sample rate is %u\n\n", sampleRate);
-  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000));
+  EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000));
 #elif defined(WEBRTC_IOS)
   TEST_LOG("Recording sample rate is %u\n\n", sampleRate);
-  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000) ||
+  EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000) ||
               (sampleRate == 8000));
 #endif

@@ -1802,10 +1802,10 @@ TEST_F(AudioDeviceAPITest, PlayoutSampleRate) {
   EXPECT_EQ(48000, sampleRate);
 #elif defined(ANDROID)
   TEST_LOG("Playout sample rate is %u\n\n", sampleRate);
-  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000));
+  EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000));
 #elif defined(WEBRTC_IOS)
   TEST_LOG("Playout sample rate is %u\n\n", sampleRate);
-  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000) ||
+  EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000) ||
               (sampleRate == 8000));
 #endif
 }
@@ -348,12 +348,6 @@ int32_t AudioTransportImpl::NeedMorePlayData(
     int32_t fsInHz(samplesPerSecIn);
     int32_t fsOutHz(samplesPerSec);

-    if (fsInHz == 44100)
-        fsInHz = 44000;
-
-    if (fsOutHz == 44100)
-        fsOutHz = 44000;
-
     if (nChannelsIn == 2 && nBytesPerSampleIn == 4)
     {
         // input is stereo => we will resample in stereo
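These hunks retire the long-standing "44000 Hz" alias: devices that actually run at 44.1 kHz are now tested and processed at their true rate instead of being remapped. The arithmetic below (illustrative only, not part of the diff) shows why the alias was lossy whenever it leaked into buffer sizing:

    #include <cstdio>

    int main() {
      const int kTrueRate = 44100, kAliasRate = 44000;
      // A 10 ms block at the true rate is 441 samples; the alias gives 440,
      // i.e. one sample of drift per 10 ms block.
      printf("10 ms @ 44100 Hz = %d samples\n", kTrueRate / 100);
      printf("10 ms @ 44000 Hz = %d samples\n", kAliasRate / 100);
      return 0;
    }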
@@ -1258,7 +1252,7 @@ int32_t FuncTestManager::TestAudioTransport()
     if (samplesPerSec == 48000) {
         _audioTransport->SetFilePlayout(
             true, GetResource(_playoutFile48.c_str()));
-    } else if (samplesPerSec == 44100 || samplesPerSec == 44000) {
+    } else if (samplesPerSec == 44100) {
         _audioTransport->SetFilePlayout(
             true, GetResource(_playoutFile44.c_str()));
     } else if (samplesPerSec == 16000) {
@@ -1491,7 +1485,7 @@ int32_t FuncTestManager::TestSpeakerVolume()
     if (48000 == samplesPerSec) {
         _audioTransport->SetFilePlayout(
             true, GetResource(_playoutFile48.c_str()));
-    } else if (44100 == samplesPerSec || samplesPerSec == 44000) {
+    } else if (44100 == samplesPerSec) {
         _audioTransport->SetFilePlayout(
             true, GetResource(_playoutFile44.c_str()));
     } else if (samplesPerSec == 16000) {
@@ -1592,7 +1586,7 @@ int32_t FuncTestManager::TestSpeakerMute()
     EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&samplesPerSec));
     if (48000 == samplesPerSec)
         _audioTransport->SetFilePlayout(true, _playoutFile48.c_str());
-    else if (44100 == samplesPerSec || 44000 == samplesPerSec)
+    else if (44100 == samplesPerSec)
         _audioTransport->SetFilePlayout(true, _playoutFile44.c_str());
     else
     {
@@ -109,7 +109,17 @@ const float WebRtcAec_overDriveCurve[65] = {
 // Target suppression levels for nlp modes.
 // log{0.001, 0.00001, 0.00000001}
 static const float kTargetSupp[3] = { -6.9f, -11.5f, -18.4f };
-static const float kMinOverDrive[3] = { 1.0f, 2.0f, 5.0f };
+
+// Two sets of parameters, one for the extended filter mode.
+static const float kExtendedMinOverDrive[3] = { 3.0f, 6.0f, 15.0f };
+static const float kNormalMinOverDrive[3] = { 1.0f, 2.0f, 5.0f };
+static const float kExtendedSmoothingCoefficients[2][2] =
+    { { 0.9f, 0.1f }, { 0.92f, 0.08f } };
+static const float kNormalSmoothingCoefficients[2][2] =
+    { { 0.9f, 0.1f }, { 0.93f, 0.07f } };
+
+// Number of partitions forming the NLP's "preferred" bands.
+enum { kPrefBandSize = 24 };

 #ifdef WEBRTC_AEC_DEBUG_DUMP
 extern int webrtc_aec_instance_count;
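Every tuning constant touched by this rollup now exists in a "normal" and an "extended filter" variant, selected at runtime by one flag. A standalone illustration of the selection pattern (not part of the diff; the two locals stand in for aec->extended_filter_enabled and aec->nlp_mode):

    #include <cstdio>

    static const float kNormalMinOverDrive[3] = { 1.0f, 2.0f, 5.0f };
    static const float kExtendedMinOverDrive[3] = { 3.0f, 6.0f, 15.0f };

    int main() {
      int extended_filter_enabled = 1;  // Stand-in for aec->extended_filter_enabled.
      int nlp_mode = 1;                 // Moderate suppression.
      const float* min_overdrive = extended_filter_enabled ?
          kExtendedMinOverDrive : kNormalMinOverDrive;
      printf("min overdrive = %.1f\n", min_overdrive[nlp_mode]);  // 6.0
      return 0;
    }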
@@ -281,13 +291,13 @@ int WebRtcAec_FreeAec(AecCore* aec)
 static void FilterFar(AecCore* aec, float yf[2][PART_LEN1])
 {
     int i;
-    for (i = 0; i < NR_PART; i++) {
+    for (i = 0; i < aec->num_partitions; i++) {
         int j;
         int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
         int pos = i * PART_LEN1;
         // Check for wrap
-        if (i + aec->xfBufBlockPos >= NR_PART) {
-            xPos -= NR_PART*(PART_LEN1);
+        if (i + aec->xfBufBlockPos >= aec->num_partitions) {
+            xPos -= aec->num_partitions*(PART_LEN1);
         }

         for (j = 0; j < PART_LEN1; j++) {
@@ -301,22 +311,25 @@ static void FilterFar(AecCore* aec, float yf[2][PART_LEN1])

 static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1])
 {
+    const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu;
+    const float error_threshold = aec->extended_filter_enabled ?
+        kExtendedErrorThreshold : aec->normal_error_threshold;
     int i;
-    float absEf;
+    float abs_ef;
     for (i = 0; i < (PART_LEN1); i++) {
         ef[0][i] /= (aec->xPow[i] + 1e-10f);
         ef[1][i] /= (aec->xPow[i] + 1e-10f);
-        absEf = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);
+        abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);

-        if (absEf > aec->errThresh) {
-            absEf = aec->errThresh / (absEf + 1e-10f);
-            ef[0][i] *= absEf;
-            ef[1][i] *= absEf;
+        if (abs_ef > error_threshold) {
+            abs_ef = error_threshold / (abs_ef + 1e-10f);
+            ef[0][i] *= abs_ef;
+            ef[1][i] *= abs_ef;
         }

         // Stepsize factor
-        ef[0][i] *= aec->mu;
-        ef[1][i] *= aec->mu;
+        ef[0][i] *= mu;
+        ef[1][i] *= mu;
     }
 }
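The shape of ScaleErrorSignal() is worth spelling out, since the hunk above only shows the renames: each frequency bin of the error is normalized by the far-end power (an NLMS-style step), its magnitude is clamped to a threshold, and a stepsize mu is applied. A single-bin sketch under stand-in values (illustrative only, not the AEC's tuned constants):

    #include <cmath>
    #include <cstdio>

    int main() {
      float ef_re = 0.02f, ef_im = -0.01f;  // One error bin (re, im).
      const float x_pow = 0.5f;             // Far-end power for this bin.
      const float error_threshold = 1.5e-6f;
      const float mu = 0.5f;

      ef_re /= (x_pow + 1e-10f);  // Normalize by far-end power.
      ef_im /= (x_pow + 1e-10f);
      float abs_ef = sqrtf(ef_re * ef_re + ef_im * ef_im);
      if (abs_ef > error_threshold) {  // Clamp the magnitude.
        const float scale = error_threshold / (abs_ef + 1e-10f);
        ef_re *= scale;
        ef_im *= scale;
      }
      ef_re *= mu;  // Stepsize factor.
      ef_im *= mu;
      printf("scaled error bin = (%g, %g)\n", ef_re, ef_im);
      return 0;
    }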
@@ -325,35 +338,35 @@ static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1])
 //static void FilterAdaptationUnconstrained(AecCore* aec, float *fft,
 //                                          float ef[2][PART_LEN1]) {
 //  int i, j;
-//  for (i = 0; i < NR_PART; i++) {
+//  for (i = 0; i < aec->num_partitions; i++) {
 //    int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
 //    int pos;
 //    // Check for wrap
-//    if (i + aec->xfBufBlockPos >= NR_PART) {
-//      xPos -= NR_PART * PART_LEN1;
+//    if (i + aec->xfBufBlockPos >= aec->num_partitions) {
+//      xPos -= aec->num_partitions * PART_LEN1;
 //    }
 //
 //    pos = i * PART_LEN1;
 //
 //    for (j = 0; j < PART_LEN1; j++) {
-//      aec->wfBuf[pos + j][0] += MulRe(aec->xfBuf[xPos + j][0],
-//                                      -aec->xfBuf[xPos + j][1],
-//                                      ef[j][0], ef[j][1]);
-//      aec->wfBuf[pos + j][1] += MulIm(aec->xfBuf[xPos + j][0],
-//                                      -aec->xfBuf[xPos + j][1],
-//                                      ef[j][0], ef[j][1]);
+//      aec->wfBuf[0][pos + j] += MulRe(aec->xfBuf[0][xPos + j],
+//                                      -aec->xfBuf[1][xPos + j],
+//                                      ef[0][j], ef[1][j]);
+//      aec->wfBuf[1][pos + j] += MulIm(aec->xfBuf[0][xPos + j],
+//                                      -aec->xfBuf[1][xPos + j],
+//                                      ef[0][j], ef[1][j]);
 //    }
 //  }
 //}

 static void FilterAdaptation(AecCore* aec, float *fft, float ef[2][PART_LEN1]) {
     int i, j;
-    for (i = 0; i < NR_PART; i++) {
+    for (i = 0; i < aec->num_partitions; i++) {
         int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
         int pos;
         // Check for wrap
-        if (i + aec->xfBufBlockPos >= NR_PART) {
-            xPos -= NR_PART * PART_LEN1;
+        if (i + aec->xfBufBlockPos >= aec->num_partitions) {
+            xPos -= aec->num_partitions * PART_LEN1;
         }

         pos = i * PART_LEN1;
@@ -427,12 +440,12 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq)
     aec->sampFreq = sampFreq;

     if (sampFreq == 8000) {
-        aec->mu = 0.6f;
-        aec->errThresh = 2e-6f;
+        aec->normal_mu = 0.6f;
+        aec->normal_error_threshold = 2e-6f;
     }
     else {
-        aec->mu = 0.5f;
-        aec->errThresh = 1.5e-6f;
+        aec->normal_mu = 0.5f;
+        aec->normal_error_threshold = 1.5e-6f;
     }

     if (WebRtc_InitBuffer(aec->nearFrBuf) == -1) {
@@ -474,6 +487,9 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq)
     aec->delay_logging_enabled = 0;
     memset(aec->delay_histogram, 0, sizeof(aec->delay_histogram));

+    aec->extended_filter_enabled = 0;
+    aec->num_partitions = kNormalNumPartitions;
+
     // Default target suppression mode.
     aec->nlp_mode = 1;

@@ -514,11 +530,14 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq)
     aec->xfBufBlockPos = 0;
     // TODO: Investigate need for these initializations. Deleting them doesn't
     // change the output at all and yields 0.4% overall speedup.
-    memset(aec->xfBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1);
-    memset(aec->wfBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1);
+    memset(aec->xfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions *
+           PART_LEN1);
+    memset(aec->wfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions *
+           PART_LEN1);
     memset(aec->sde, 0, sizeof(complex_t) * PART_LEN1);
     memset(aec->sxd, 0, sizeof(complex_t) * PART_LEN1);
-    memset(aec->xfwBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1);
+    memset(aec->xfwBuf, 0, sizeof(complex_t) * kExtendedNumPartitions *
+           PART_LEN1);
     memset(aec->se, 0, sizeof(float) * PART_LEN1);

     // To prevent numerical instability in the first block.
@@ -734,13 +753,11 @@ int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std) {
 }

 int WebRtcAec_echo_state(AecCore* self) {
-    assert(self != NULL);
     return self->echoState;
 }

 void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle,
                             Stats* a_nlp) {
-    assert(self != NULL);
     assert(erl != NULL);
     assert(erle != NULL);
     assert(a_nlp != NULL);
@@ -751,14 +768,12 @@ void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle,

 #ifdef WEBRTC_AEC_DEBUG_DUMP
 void* WebRtcAec_far_time_buf(AecCore* self) {
-    assert(self != NULL);
     return self->far_time_buf;
 }
 #endif

 void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode,
                              int delay_logging) {
-    assert(self != NULL);
     assert(nlp_mode >= 0 && nlp_mode < 3);
     self->nlp_mode = nlp_mode;
     self->metricsMode = metrics_mode;
@@ -771,13 +786,20 @@ void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode,
     }
 }

+void WebRtcAec_enable_delay_correction(AecCore* self, int enable) {
+    self->extended_filter_enabled = enable;
+    self->num_partitions = enable ? kExtendedNumPartitions : kNormalNumPartitions;
+}
+
+int WebRtcAec_delay_correction_enabled(AecCore* self) {
+    return self->extended_filter_enabled;
+}
+
 int WebRtcAec_system_delay(AecCore* self) {
-    assert(self != NULL);
     return self->system_delay;
 }

 void WebRtcAec_SetSystemDelay(AecCore* self, int delay) {
-    assert(self != NULL);
     assert(delay >= 0);
     self->system_delay = delay;
 }
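An illustrative use of the two accessors added above (not part of the diff; it assumes the WebRTC tree is on the include path and the AecCore was created and initialized through the usual WebRtcAec_CreateAec/InitAec path):

    #include "webrtc/modules/audio_processing/aec/aec_core.h"

    void EnableExtendedFilter(AecCore* aec) {
      // Non-zero switches to the 32-partition extended filter; zero restores
      // the normal 12-partition filter.
      WebRtcAec_enable_delay_correction(aec, 1);
      if (WebRtcAec_delay_correction_enabled(aec)) {
        // From here on, FilterFar()/FilterAdaptation() iterate over
        // num_partitions == kExtendedNumPartitions.
      }
    }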
@@ -853,7 +875,8 @@ static void ProcessBlock(AecCore* aec) {
     for (i = 0; i < PART_LEN1; i++) {
         far_spectrum = (xf_ptr[i] * xf_ptr[i]) +
             (xf_ptr[PART_LEN1 + i] * xf_ptr[PART_LEN1 + i]);
-        aec->xPow[i] = gPow[0] * aec->xPow[i] + gPow[1] * NR_PART * far_spectrum;
+        aec->xPow[i] = gPow[0] * aec->xPow[i] + gPow[1] * aec->num_partitions *
+            far_spectrum;
         // Calculate absolute spectra
         abs_far_spectrum[i] = sqrtf(far_spectrum);

@@ -913,7 +936,7 @@ static void ProcessBlock(AecCore* aec) {
     // Update the xfBuf block position.
     aec->xfBufBlockPos--;
     if (aec->xfBufBlockPos == -1) {
-        aec->xfBufBlockPos = NR_PART - 1;
+        aec->xfBufBlockPos = aec->num_partitions - 1;
     }

     // Buffer xf
@@ -1014,18 +1037,21 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH)
     float cohde[PART_LEN1], cohxd[PART_LEN1];
     float hNlDeAvg, hNlXdAvg;
     float hNl[PART_LEN1];
-    float hNlPref[PREF_BAND_SIZE];
+    float hNlPref[kPrefBandSize];
     float hNlFb = 0, hNlFbLow = 0;
     const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f;
-    const int prefBandSize = PREF_BAND_SIZE / aec->mult;
+    const int prefBandSize = kPrefBandSize / aec->mult;
     const int minPrefBand = 4 / aec->mult;

     // Near and error power sums
     float sdSum = 0, seSum = 0;

-    // Power estimate smoothing coefficients
-    const float gCoh[2][2] = {{0.9f, 0.1f}, {0.93f, 0.07f}};
-    const float *ptrGCoh = gCoh[aec->mult - 1];
+    // Power estimate smoothing coefficients.
+    const float *ptrGCoh = aec->extended_filter_enabled ?
+        kExtendedSmoothingCoefficients[aec->mult - 1] :
+        kNormalSmoothingCoefficients[aec->mult - 1];
+    const float* min_overdrive = aec->extended_filter_enabled ?
+        kExtendedMinOverDrive : kNormalMinOverDrive;

     // Filter energy
     float wfEnMax = 0, wfEn = 0;
@@ -1048,7 +1074,7 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH)
     if (aec->delayEstCtr == 0) {
         wfEnMax = 0;
         aec->delayIdx = 0;
-        for (i = 0; i < NR_PART; i++) {
+        for (i = 0; i < aec->num_partitions; i++) {
             pos = i * PART_LEN1;
            wfEn = 0;
             for (j = 0; j < PART_LEN1; j++) {
@@ -1189,7 +1215,7 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH)

     if (aec->hNlXdAvgMin == 1) {
         aec->echoState = 0;
-        aec->overDrive = kMinOverDrive[aec->nlp_mode];
+        aec->overDrive = min_overdrive[aec->nlp_mode];

         if (aec->stNearState == 1) {
             memcpy(hNl, cohde, sizeof(hNl));
@@ -1245,7 +1271,7 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH)
         aec->hNlMinCtr = 0;
         aec->overDrive = WEBRTC_SPL_MAX(kTargetSupp[aec->nlp_mode] /
             ((float)log(aec->hNlFbMin + 1e-10f) + 1e-10f),
-            kMinOverDrive[aec->nlp_mode]);
+            min_overdrive[aec->nlp_mode]);
     }

     // Smooth the overdrive.
@@ -1465,7 +1491,6 @@ static void InitStats(Stats* stats) {
 }

 static void InitMetrics(AecCore* self) {
-    assert(self != NULL);
     self->stateCounter = 0;
     InitLevel(&self->farlevel);
     InitLevel(&self->nearlevel);
@@ -1687,3 +1712,4 @@ static void TimeToFrequency(float time_data[PART_LEN2],
         freq_data[1][i] = time_data[2 * i + 1];
     }
 }
+
@@ -70,23 +70,38 @@ void WebRtcAec_ProcessFrame(AecCore* aec,
 // Returns the number of elements moved, and adjusts |system_delay| by the
 // corresponding amount in ms.
 int WebRtcAec_MoveFarReadPtr(AecCore* aec, int elements);

 // Calculates the median and standard deviation among the delay estimates
 // collected since the last call to this function.
 int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std);

 // Returns the echo state (1: echo, 0: no echo).
 int WebRtcAec_echo_state(AecCore* self);

 // Gets statistics of the echo metrics ERL, ERLE, A_NLP.
 void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle,
                             Stats* a_nlp);
 #ifdef WEBRTC_AEC_DEBUG_DUMP
 void* WebRtcAec_far_time_buf(AecCore* self);
 #endif

 // Sets local configuration modes.
 void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode,
                              int delay_logging);

+// We now interpret delay correction to mean an extended filter length feature.
+// We reuse the delay correction infrastructure to avoid changes through to
+// libjingle. See details along with |DelayCorrection| in
+// echo_cancellation_impl.h. Non-zero enables, zero disables.
+void WebRtcAec_enable_delay_correction(AecCore* self, int enable);
+
+// Returns non-zero if delay correction is enabled and zero if disabled.
+int WebRtcAec_delay_correction_enabled(AecCore* self);
+
 // Returns the current |system_delay|, i.e., the buffered difference between
 // far-end and near-end.
 int WebRtcAec_system_delay(AecCore* self);

 // Sets the |system_delay| to |value|. Note that if the value is changed
 // improperly, there can be a performance regression. So it should be used with
 // care.
@@ -22,6 +22,16 @@
 #define NR_PART 12 // Number of partitions in filter.
 #define PREF_BAND_SIZE 24

+// Number of partitions for the extended filter mode. The first one is an enum
+// to be used in array declarations, as it represents the maximum filter length.
+enum { kExtendedNumPartitions = 32 };
+static const int kNormalNumPartitions = 12;
+
+// Extended filter adaptation parameters.
+// TODO(ajm): No narrowband tuning yet.
+static const float kExtendedMu = 0.4f;
+static const float kExtendedErrorThreshold = 1.0e-6f;
+
 typedef struct PowerLevel {
     float sfrsum;
     int sfrcounter;
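Back-of-the-envelope arithmetic on what these partition counts buy (illustrative, not part of the diff; it assumes the AEC's usual PART_LEN of 64 samples and its internal 16 kHz processing rate):

    #include <cstdio>

    int main() {
      const int kPartLen = 64, kRateHz = 16000;
      const int kNormal = 12, kExtended = 32;
      // Echo tail covered by the adaptive filter, in milliseconds.
      printf("normal:   %d ms of tail\n", kNormal * kPartLen * 1000 / kRateHz);    // 48 ms
      printf("extended: %d ms of tail\n", kExtended * kPartLen * 1000 / kRateHz);  // 128 ms
      return 0;
    }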
@@ -56,11 +66,12 @@ struct AecCore {
     float dInitMinPow[PART_LEN1];
     float *noisePow;

-    float xfBuf[2][NR_PART * PART_LEN1]; // farend fft buffer
-    float wfBuf[2][NR_PART * PART_LEN1]; // filter fft
+    float xfBuf[2][kExtendedNumPartitions * PART_LEN1]; // farend fft buffer
+    float wfBuf[2][kExtendedNumPartitions * PART_LEN1]; // filter fft
     complex_t sde[PART_LEN1]; // cross-psd of nearend and error
     complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
-    complex_t xfwBuf[NR_PART * PART_LEN1]; // farend windowed fft buffer
+    // Farend windowed fft buffer.
+    complex_t xfwBuf[kExtendedNumPartitions * PART_LEN1];

     float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd
     float hNs[PART_LEN1];
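Note the design choice here: the FFT buffers are statically sized to the extended maximum, so num_partitions can change at runtime without any reallocation, at the cost of a fixed memory overhead. A quick check of that cost (illustrative; it assumes PART_LEN1 == 65 and complex_t being two floats, as in this AEC):

    #include <cstdio>

    int main() {
      const size_t kPartLen1 = 65, kExtendedNumPartitions = 32;
      const size_t bin = 2 * sizeof(float);  // complex_t
      printf("xfwBuf: %zu bytes\n", kExtendedNumPartitions * kPartLen1 * bin);  // 16640
      return 0;
    }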
@@ -85,8 +96,8 @@ struct AecCore {
     int sampFreq;
     uint32_t seed;

-    float mu; // stepsize
-    float errThresh; // error threshold
+    float normal_mu; // stepsize
+    float normal_error_threshold; // error threshold

     int noiseEstCtr;

@@ -112,6 +123,11 @@ struct AecCore {
     void* delay_estimator_farend;
     void* delay_estimator;

+    // 1 = extended filter mode enabled, 0 = disabled.
+    int extended_filter_enabled;
+    // Runtime selection of number of filter partitions.
+    int num_partitions;
+
 #ifdef WEBRTC_AEC_DEBUG_DUMP
     RingBuffer* far_time_buf;
     FILE *farFile;
@@ -34,13 +34,14 @@ __inline static float MulIm(float aRe, float aIm, float bRe, float bIm)
 static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1])
 {
     int i;
-    for (i = 0; i < NR_PART; i++) {
+    const int num_partitions = aec->num_partitions;
+    for (i = 0; i < num_partitions; i++) {
         int j;
         int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
         int pos = i * PART_LEN1;
         // Check for wrap
-        if (i + aec->xfBufBlockPos >= NR_PART) {
-            xPos -= NR_PART*(PART_LEN1);
+        if (i + aec->xfBufBlockPos >= num_partitions) {
+            xPos -= num_partitions*(PART_LEN1);
         }

         // vectorized code (four at once)
@@ -75,8 +76,11 @@ static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1])
 static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1])
 {
     const __m128 k1e_10f = _mm_set1_ps(1e-10f);
-    const __m128 kThresh = _mm_set1_ps(aec->errThresh);
-    const __m128 kMu = _mm_set1_ps(aec->mu);
+    const __m128 kMu = aec->extended_filter_enabled ?
+        _mm_set1_ps(kExtendedMu) : _mm_set1_ps(aec->normal_mu);
+    const __m128 kThresh = aec->extended_filter_enabled ?
+        _mm_set1_ps(kExtendedErrorThreshold) :
+        _mm_set1_ps(aec->normal_error_threshold);

     int i;
     // vectorized code (four at once)
@@ -110,32 +114,39 @@ static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1])
         _mm_storeu_ps(&ef[1][i], ef_im);
     }
     // scalar code for the remaining items.
+    {
+        const float mu = aec->extended_filter_enabled ?
+            kExtendedMu : aec->normal_mu;
+        const float error_threshold = aec->extended_filter_enabled ?
+            kExtendedErrorThreshold : aec->normal_error_threshold;
         for (; i < (PART_LEN1); i++) {
-        float absEf;
+            float abs_ef;
             ef[0][i] /= (aec->xPow[i] + 1e-10f);
             ef[1][i] /= (aec->xPow[i] + 1e-10f);
-        absEf = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);
+            abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);

-        if (absEf > aec->errThresh) {
-            absEf = aec->errThresh / (absEf + 1e-10f);
-            ef[0][i] *= absEf;
-            ef[1][i] *= absEf;
+            if (abs_ef > error_threshold) {
+                abs_ef = error_threshold / (abs_ef + 1e-10f);
+                ef[0][i] *= abs_ef;
+                ef[1][i] *= abs_ef;
             }

             // Stepsize factor
-        ef[0][i] *= aec->mu;
-        ef[1][i] *= aec->mu;
+            ef[0][i] *= mu;
+            ef[1][i] *= mu;
+        }
     }
 }

 static void FilterAdaptationSSE2(AecCore* aec, float *fft, float ef[2][PART_LEN1]) {
     int i, j;
-    for (i = 0; i < NR_PART; i++) {
+    const int num_partitions = aec->num_partitions;
+    for (i = 0; i < num_partitions; i++) {
         int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
         int pos = i * PART_LEN1;
         // Check for wrap
-        if (i + aec->xfBufBlockPos >= NR_PART) {
-            xPos -= NR_PART * PART_LEN1;
+        if (i + aec->xfBufBlockPos >= num_partitions) {
+            xPos -= num_partitions * PART_LEN1;
         }

         // Process the whole array...
@@ -413,3 +424,4 @@ void WebRtcAec_InitAec_SSE2(void) {
     WebRtcAec_FilterAdaptation = FilterAdaptationSSE2;
     WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressSSE2;
 }
+
@@ -27,6 +27,61 @@
 #include "webrtc/modules/audio_processing/utility/ring_buffer.h"
 #include "webrtc/typedefs.h"

+// Measured delays [ms]
+// Device                 Chrome  GTP
+// MacBook Air            10
+// MacBook Retina         10      100
+// MacPro                 30?
+//
+// Win7 Desktop           70      80?
+// Win7 T430s             110
+// Win8 T420s             70
+//
+// Daisy                  50
+// Pixel (w/ preproc?)            240
+// Pixel (w/o preproc?)   110     110
+
+// The extended filter mode gives us the flexibility to ignore the system's
+// reported delays. We do this for platforms which we believe provide results
+// which are incompatible with the AEC's expectations. Based on measurements
+// (some provided above) we set a conservative (i.e. lower than measured)
+// fixed delay.
+//
+// WEBRTC_UNTRUSTED_DELAY will only have an impact when |extended_filter_mode|
+// is enabled. See the note along with |DelayCorrection| in
+// echo_cancellation_impl.h for more details on the mode.
+//
+// Justification:
+// Chromium/Mac: Here, the true latency is so low (~10-20 ms), that it plays
+// havoc with the AEC's buffering. To avoid this, we set a fixed delay of 20 ms
+// and then compensate by rewinding by 10 ms (in wideband) through
+// kDelayDiffOffsetSamples. This trick does not seem to work for larger rewind
+// values, but fortunately this is sufficient.
+//
+// Chromium/Linux(ChromeOS): The values we get on this platform don't correspond
+// well to reality. The variance doesn't match the AEC's buffer changes, and the
+// bulk values tend to be too low. However, the range across different hardware
+// appears to be too large to choose a single value.
+//
+// GTP/Linux(ChromeOS): TBD, but for the moment we will trust the values.
+#if defined(WEBRTC_CHROMIUM_BUILD) && defined(WEBRTC_MAC)
+#define WEBRTC_UNTRUSTED_DELAY
+#endif
+
+#if defined(WEBRTC_MAC)
+static const int kFixedDelayMs = 20;
+static const int kDelayDiffOffsetSamples = -160;
+#elif defined(WEBRTC_WIN)
+static const int kFixedDelayMs = 50;
+static const int kDelayDiffOffsetSamples = 0;
+#else
+// Essentially ChromeOS.
+static const int kFixedDelayMs = 50;
+static const int kDelayDiffOffsetSamples = 0;
+#endif
+static const int kMinTrustedDelayMs = 20;
+static const int kMaxTrustedDelayMs = 500;
+
 // Maximum length of resampled signal. Must be an integer multiple of frames
 // (ceil(1/(1 + MIN_SKEW)*2) + 1)*FRAME_LEN
 // The factor of 2 handles wb, and the + 1 is as a safety margin
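How the constants above are meant to combine, in one place: on a platform marked untrusted the fixed per-platform delay replaces the system's report outright, and otherwise the report is clamped to the trusted range. A simplified sketch of that selection (illustrative only, not the diff's exact code path):

    #include <algorithm>

    static const int kFixedDelayMs = 50;
    static const int kMinTrustedDelayMs = 20;
    static const int kMaxTrustedDelayMs = 500;

    int EffectiveDelayMs(int reported_delay_ms, bool untrusted_delay) {
      if (untrusted_delay)
        return kFixedDelayMs;  // Ignore the system's report entirely.
      return std::min(kMaxTrustedDelayMs,
                      std::max(kMinTrustedDelayMs, reported_delay_ms));
    }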
@@ -43,7 +98,14 @@ int webrtc_aec_instance_count = 0;

 // Estimates delay to set the position of the far-end buffer read pointer
 // (controlled by knownDelay)
-static int EstBufDelay(aecpc_t *aecInst);
+static void EstBufDelayNormal(aecpc_t *aecInst);
+static void EstBufDelayExtended(aecpc_t *aecInst);
+static int ProcessNormal(aecpc_t* self, const int16_t* near,
+    const int16_t* near_high, int16_t* out, int16_t* out_high,
+    int16_t num_samples, int16_t reported_delay_ms, int32_t skew);
+static void ProcessExtended(aecpc_t* self, const int16_t* near,
+    const int16_t* near_high, int16_t* out, int16_t* out_high,
+    int16_t num_samples, int16_t reported_delay_ms, int32_t skew);

 int32_t WebRtcAec_Create(void **aecInst)
 {
@@ -135,10 +197,6 @@ int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq)
     aecpc_t *aecpc = aecInst;
     AecConfig aecConfig;

-    if (aecpc == NULL) {
-        return -1;
-    }
-
     if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000) {
         aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
         return -1;
@@ -177,31 +235,31 @@ int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq)
         aecpc->splitSampFreq = sampFreq;
     }

-    aecpc->skewFrCtr = 0;
-    aecpc->activity = 0;
-
     aecpc->delayCtr = 0;
+    aecpc->sampFactor = (aecpc->scSampFreq * 1.0f) / aecpc->splitSampFreq;
+    // Sampling frequency multiplier (SWB is processed as 160 frame size).
+    aecpc->rate_factor = aecpc->splitSampFreq / 8000;

     aecpc->sum = 0;
     aecpc->counter = 0;
     aecpc->checkBuffSize = 1;
     aecpc->firstVal = 0;

-    aecpc->ECstartup = 1;
+    aecpc->startup_phase = 1;
     aecpc->bufSizeStart = 0;
     aecpc->checkBufSizeCtr = 0;
-    aecpc->filtDelay = 0;
+    aecpc->msInSndCardBuf = 0;
+    aecpc->filtDelay = -1;  // -1 indicates an initialized state.
     aecpc->timeForDelayChange = 0;
     aecpc->knownDelay = 0;
     aecpc->lastDelayDiff = 0;

-    aecpc->skew = 0;
+    aecpc->skewFrCtr = 0;
     aecpc->resample = kAecFalse;
     aecpc->highSkewCtr = 0;
-    aecpc->sampFactor = (aecpc->scSampFreq * 1.0f) / aecpc->splitSampFreq;
+    aecpc->skew = 0;

-    // Sampling frequency multiplier (SWB is processed as 160 frame size).
-    aecpc->rate_factor = aecpc->splitSampFreq / 8000;
+    aecpc->farend_started = 0;

     // Default settings.
     aecConfig.nlpMode = kAecNlpModerate;
@@ -239,10 +297,6 @@ int32_t WebRtcAec_BufferFarend(void *aecInst, const int16_t *farend,
     float skew;
     int i = 0;

-    if (aecpc == NULL) {
-        return -1;
-    }
-
     if (farend == NULL) {
         aecpc->lastError = AEC_NULL_POINTER_ERROR;
         return -1;
@@ -268,6 +322,7 @@ int32_t WebRtcAec_BufferFarend(void *aecInst, const int16_t *farend,
         farend_ptr = (const int16_t*) newFarend;
     }

+    aecpc->farend_started = 1;
     WebRtcAec_SetSystemDelay(aecpc->aec, WebRtcAec_system_delay(aecpc->aec) +
                              newNrOfSamples);

@@ -311,17 +366,6 @@ int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend,
 {
     aecpc_t *aecpc = aecInst;
     int32_t retVal = 0;
-    short i;
-    short nBlocks10ms;
-    short nFrames;
-    // Limit resampling to doubling/halving of signal
-    const float minSkewEst = -0.5f;
-    const float maxSkewEst = 1.0f;
-
-    if (aecpc == NULL) {
-        return -1;
-    }

     if (nearend == NULL) {
         aecpc->lastError = AEC_NULL_POINTER_ERROR;
         return -1;
@@ -354,143 +398,20 @@ int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend,
         aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
         retVal = -1;
     }
-    else if (msInSndCardBuf > 500) {
-        msInSndCardBuf = 500;
+    else if (msInSndCardBuf > kMaxTrustedDelayMs) {
+        // The clamping is now done in ProcessExtended/Normal().
         aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
         retVal = -1;
     }
-    // TODO(andrew): we need to investigate if this +10 is really wanted.
-    msInSndCardBuf += 10;
-    aecpc->msInSndCardBuf = msInSndCardBuf;

-    if (aecpc->skewMode == kAecTrue) {
-        if (aecpc->skewFrCtr < 25) {
-            aecpc->skewFrCtr++;
-        }
-        else {
-            retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew);
-            if (retVal == -1) {
-                aecpc->skew = 0;
-                aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
-            }
-
-            aecpc->skew /= aecpc->sampFactor*nrOfSamples;
-
-            if (aecpc->skew < 1.0e-3 && aecpc->skew > -1.0e-3) {
-                aecpc->resample = kAecFalse;
-            }
-            else {
-                aecpc->resample = kAecTrue;
-            }
-
-            if (aecpc->skew < minSkewEst) {
-                aecpc->skew = minSkewEst;
-            }
-            else if (aecpc->skew > maxSkewEst) {
-                aecpc->skew = maxSkewEst;
-            }
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-            (void)fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile);
-#endif
-        }
-    }
-
-    nFrames = nrOfSamples / FRAME_LEN;
-    nBlocks10ms = nFrames / aecpc->rate_factor;
-
-    if (aecpc->ECstartup) {
-        if (nearend != out) {
-            // Only needed if they don't already point to the same place.
-            memcpy(out, nearend, sizeof(short) * nrOfSamples);
-        }
-
-        // The AEC is in the start up mode
-        // AEC is disabled until the system delay is OK
-
-        // Mechanism to ensure that the system delay is reasonably stable.
-        if (aecpc->checkBuffSize) {
-            aecpc->checkBufSizeCtr++;
-            // Before we fill up the far-end buffer we require the system delay
-            // to be stable (+/-8 ms) compared to the first value. This
-            // comparison is made during the following 6 consecutive 10 ms
-            // blocks. If it seems to be stable then we start to fill up the
-            // far-end buffer.
-            if (aecpc->counter == 0) {
-                aecpc->firstVal = aecpc->msInSndCardBuf;
-                aecpc->sum = 0;
-            }
-
-            if (abs(aecpc->firstVal - aecpc->msInSndCardBuf) <
-                    WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) {
-                aecpc->sum += aecpc->msInSndCardBuf;
-                aecpc->counter++;
-            }
-            else {
-                aecpc->counter = 0;
-            }
-
-            if (aecpc->counter * nBlocks10ms >= 6) {
-                // The far-end buffer size is determined in partitions of
-                // PART_LEN samples. Use 75% of the average value of the system
-                // delay as buffer size to start with.
-                aecpc->bufSizeStart = WEBRTC_SPL_MIN((3 * aecpc->sum *
-                    aecpc->rate_factor * 8) / (4 * aecpc->counter * PART_LEN),
-                    kMaxBufSizeStart);
-                // Buffer size has now been determined.
-                aecpc->checkBuffSize = 0;
-            }
-
-            if (aecpc->checkBufSizeCtr * nBlocks10ms > 50) {
-                // For really bad systems, don't disable the echo canceller for
-                // more than 0.5 sec.
-                aecpc->bufSizeStart = WEBRTC_SPL_MIN((aecpc->msInSndCardBuf *
-                    aecpc->rate_factor * 3) / 40, kMaxBufSizeStart);
-                aecpc->checkBuffSize = 0;
-            }
-        }
-
-        // If |checkBuffSize| changed in the if-statement above.
-        if (!aecpc->checkBuffSize) {
-            // The system delay is now reasonably stable (or has been unstable
-            // for too long). When the far-end buffer is filled with
-            // approximately the same amount of data as reported by the system
-            // we end the startup phase.
-            int overhead_elements =
-                WebRtcAec_system_delay(aecpc->aec) / PART_LEN -
-                aecpc->bufSizeStart;
-            if (overhead_elements == 0) {
-                // Enable the AEC
-                aecpc->ECstartup = 0;
-            } else if (overhead_elements > 0) {
-                // TODO(bjornv): Do we need a check on how much we actually
-                // moved the read pointer? It should always be possible to move
-                // the pointer |overhead_elements| since we have only added data
-                // to the buffer and no delay compensation nor AEC processing
-                // has been done.
-                WebRtcAec_MoveFarReadPtr(aecpc->aec, overhead_elements);
-
-                // Enable the AEC
-                aecpc->ECstartup = 0;
-            }
-        }
-    } else {
-        // AEC is enabled.
-        EstBufDelay(aecpc);
-
-        // Note that 1 frame is supported for NB and 2 frames for WB.
-        for (i = 0; i < nFrames; i++) {
-            // Call the AEC.
-            WebRtcAec_ProcessFrame(aecpc->aec,
-                                   &nearend[FRAME_LEN * i],
-                                   &nearendH[FRAME_LEN * i],
-                                   aecpc->knownDelay,
-                                   &out[FRAME_LEN * i],
-                                   &outH[FRAME_LEN * i]);
-            // TODO(bjornv): Re-structure such that we don't have to pass
-            // |aecpc->knownDelay| as input. Change name to something like
-            // |system_buffer_diff|.
-        }
+    // This returns the value of aec->extended_filter_enabled.
+    if (WebRtcAec_delay_correction_enabled(aecpc->aec)) {
+        ProcessExtended(aecpc, nearend, nearendH, out, outH, nrOfSamples,
+                        msInSndCardBuf, skew);
+    } else {
+        if (ProcessNormal(aecpc, nearend, nearendH, out, outH, nrOfSamples,
+                          msInSndCardBuf, skew) != 0) {
+            retVal = -1;
+        }
     }
@ -509,11 +430,6 @@ int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend,
int WebRtcAec_set_config(void* handle, AecConfig config) {
  aecpc_t* self = (aecpc_t*)handle;

  if (handle == NULL ) {
    return -1;
  }

  if (self->initFlag != initCheck) {
    self->lastError = AEC_UNINITIALIZED_ERROR;
    return -1;
@ -548,10 +464,6 @@ int WebRtcAec_set_config(void* handle, AecConfig config) {
int WebRtcAec_get_echo_status(void* handle, int* status) {
  aecpc_t* self = (aecpc_t*)handle;

  if (handle == NULL ) {
    return -1;
  }
  if (status == NULL ) {
    self->lastError = AEC_NULL_POINTER_ERROR;
    return -1;
@ -665,10 +577,6 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) {
int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) {
  aecpc_t* self = handle;

  if (handle == NULL) {
    return -1;
  }
  if (median == NULL) {
    self->lastError = AEC_NULL_POINTER_ERROR;
    return -1;
@ -693,11 +601,6 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) {
int32_t WebRtcAec_get_error_code(void *aecInst)
{
  aecpc_t *aecpc = aecInst;

  if (aecpc == NULL) {
    return -1;
  }

  return aecpc->lastError;
}
@ -708,7 +611,225 @@ AecCore* WebRtcAec_aec_core(void* handle) {
  return ((aecpc_t*) handle)->aec;
}

static int EstBufDelay(aecpc_t* aecpc) {
static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend,
                         const int16_t *nearendH, int16_t *out, int16_t *outH,
                         int16_t nrOfSamples, int16_t msInSndCardBuf,
                         int32_t skew) {
  int retVal = 0;
  short i;
  short nBlocks10ms;
  short nFrames;
  // Limit resampling to doubling/halving of signal
  const float minSkewEst = -0.5f;
  const float maxSkewEst = 1.0f;

  msInSndCardBuf = msInSndCardBuf > kMaxTrustedDelayMs ?
      kMaxTrustedDelayMs : msInSndCardBuf;
  // TODO(andrew): we need to investigate if this +10 is really wanted.
  msInSndCardBuf += 10;
  aecpc->msInSndCardBuf = msInSndCardBuf;

  if (aecpc->skewMode == kAecTrue) {
    if (aecpc->skewFrCtr < 25) {
      aecpc->skewFrCtr++;
    }
    else {
      retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew);
      if (retVal == -1) {
        aecpc->skew = 0;
        aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
      }

      aecpc->skew /= aecpc->sampFactor*nrOfSamples;

      if (aecpc->skew < 1.0e-3 && aecpc->skew > -1.0e-3) {
        aecpc->resample = kAecFalse;
      }
      else {
        aecpc->resample = kAecTrue;
      }

      if (aecpc->skew < minSkewEst) {
        aecpc->skew = minSkewEst;
      }
      else if (aecpc->skew > maxSkewEst) {
        aecpc->skew = maxSkewEst;
      }

#ifdef WEBRTC_AEC_DEBUG_DUMP
      (void)fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile);
#endif
    }
  }

  nFrames = nrOfSamples / FRAME_LEN;
  nBlocks10ms = nFrames / aecpc->rate_factor;

  if (aecpc->startup_phase) {
    // Only needed if they don't already point to the same place.
    if (nearend != out) {
      memcpy(out, nearend, sizeof(short) * nrOfSamples);
    }
    if (nearendH != outH) {
      memcpy(outH, nearendH, sizeof(short) * nrOfSamples);
    }

    // The AEC is in the start up mode
    // AEC is disabled until the system delay is OK

    // Mechanism to ensure that the system delay is reasonably stable.
    if (aecpc->checkBuffSize) {
      aecpc->checkBufSizeCtr++;
      // Before we fill up the far-end buffer we require the system delay
      // to be stable (+/-8 ms) compared to the first value. This
      // comparison is made during the following 6 consecutive 10 ms
      // blocks. If it seems to be stable then we start to fill up the
      // far-end buffer.
      if (aecpc->counter == 0) {
        aecpc->firstVal = aecpc->msInSndCardBuf;
        aecpc->sum = 0;
      }

      if (abs(aecpc->firstVal - aecpc->msInSndCardBuf) <
          WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) {
        aecpc->sum += aecpc->msInSndCardBuf;
        aecpc->counter++;
      }
      else {
        aecpc->counter = 0;
      }

      if (aecpc->counter * nBlocks10ms >= 6) {
        // The far-end buffer size is determined in partitions of
        // PART_LEN samples. Use 75% of the average value of the system
        // delay as buffer size to start with.
        aecpc->bufSizeStart = WEBRTC_SPL_MIN((3 * aecpc->sum *
            aecpc->rate_factor * 8) / (4 * aecpc->counter * PART_LEN),
            kMaxBufSizeStart);
        // Buffer size has now been determined.
        aecpc->checkBuffSize = 0;
      }

      if (aecpc->checkBufSizeCtr * nBlocks10ms > 50) {
        // For really bad systems, don't disable the echo canceller for
        // more than 0.5 sec.
        aecpc->bufSizeStart = WEBRTC_SPL_MIN((aecpc->msInSndCardBuf *
            aecpc->rate_factor * 3) / 40, kMaxBufSizeStart);
        aecpc->checkBuffSize = 0;
      }
    }

    // If |checkBuffSize| changed in the if-statement above.
    if (!aecpc->checkBuffSize) {
      // The system delay is now reasonably stable (or has been unstable
      // for too long). When the far-end buffer is filled with
      // approximately the same amount of data as reported by the system
      // we end the startup phase.
      int overhead_elements =
          WebRtcAec_system_delay(aecpc->aec) / PART_LEN - aecpc->bufSizeStart;
      if (overhead_elements == 0) {
        // Enable the AEC
        aecpc->startup_phase = 0;
      } else if (overhead_elements > 0) {
        // TODO(bjornv): Do we need a check on how much we actually
        // moved the read pointer? It should always be possible to move
        // the pointer |overhead_elements| since we have only added data
        // to the buffer and no delay compensation nor AEC processing
        // has been done.
        WebRtcAec_MoveFarReadPtr(aecpc->aec, overhead_elements);

        // Enable the AEC
        aecpc->startup_phase = 0;
      }
    }
  } else {
    // AEC is enabled.
    EstBufDelayNormal(aecpc);

    // Note that 1 frame is supported for NB and 2 frames for WB.
    for (i = 0; i < nFrames; i++) {
      // Call the AEC.
      WebRtcAec_ProcessFrame(aecpc->aec,
                             &nearend[FRAME_LEN * i],
                             &nearendH[FRAME_LEN * i],
                             aecpc->knownDelay,
                             &out[FRAME_LEN * i],
                             &outH[FRAME_LEN * i]);
      // TODO(bjornv): Re-structure such that we don't have to pass
      // |aecpc->knownDelay| as input. Change name to something like
      // |system_buffer_diff|.
    }
  }

  return retVal;
}
static void ProcessExtended(aecpc_t* self, const int16_t* near,
    const int16_t* near_high, int16_t* out, int16_t* out_high,
    int16_t num_samples, int16_t reported_delay_ms, int32_t skew) {
  int i;
  const int num_frames = num_samples / FRAME_LEN;
#if defined(WEBRTC_UNTRUSTED_DELAY)
  const int delay_diff_offset = kDelayDiffOffsetSamples;
  reported_delay_ms = kFixedDelayMs;
#else
  // This is the usual mode where we trust the reported system delay values.
  const int delay_diff_offset = 0;
  // Due to the longer filter, we no longer add 10 ms to the reported delay
  // to reduce chance of non-causality. Instead we apply a minimum here to avoid
  // issues with the read pointer jumping around needlessly.
  reported_delay_ms = reported_delay_ms < kMinTrustedDelayMs ?
      kMinTrustedDelayMs : reported_delay_ms;
  // If the reported delay appears to be bogus, we attempt to recover by using
  // the measured fixed delay values. We use >= here because higher layers
  // may already clamp to this maximum value, and we would otherwise not
  // detect it here.
  reported_delay_ms = reported_delay_ms >= kMaxTrustedDelayMs ?
      kFixedDelayMs : reported_delay_ms;
#endif
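The sanitation above reads as a small pure function. A minimal sketch, with illustrative constant values (kMinTrustedDelayMs, kMaxTrustedDelayMs and kFixedDelayMs are defined elsewhere in this file; the numbers below are assumptions, not the tuned ones):

// Illustrative only: the reported-delay sanitation in the trusted-delay path.
int SanitizeReportedDelayMs(int reported_delay_ms) {
  const int kMinTrustedDelayMs = 20;   // assumed value
  const int kMaxTrustedDelayMs = 500;  // assumed value
  const int kFixedDelayMs = 50;        // assumed value
  // Clamp from below so the far-end read pointer doesn't jitter needlessly.
  if (reported_delay_ms < kMinTrustedDelayMs)
    reported_delay_ms = kMinTrustedDelayMs;
  // At or above the trusted maximum the value is treated as bogus and the
  // measured fixed delay is used instead.
  if (reported_delay_ms >= kMaxTrustedDelayMs)
    reported_delay_ms = kFixedDelayMs;
  return reported_delay_ms;
}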
  self->msInSndCardBuf = reported_delay_ms;

  if (!self->farend_started) {
    // Only needed if they don't already point to the same place.
    if (near != out) {
      memcpy(out, near, sizeof(short) * num_samples);
    }
    if (near_high != out_high) {
      memcpy(out_high, near_high, sizeof(short) * num_samples);
    }
    return;
  }
  if (self->startup_phase) {
    // In the extended mode, there isn't a startup "phase", just a special
    // action on the first frame. In the trusted delay case, we'll take the
    // current reported delay, unless it's less than our conservative
    // measurement.
    int startup_size_ms = reported_delay_ms < kFixedDelayMs ?
        kFixedDelayMs : reported_delay_ms;
    int overhead_elements = (WebRtcAec_system_delay(self->aec) -
        startup_size_ms / 2 * self->rate_factor * 8) / PART_LEN;
    WebRtcAec_MoveFarReadPtr(self->aec, overhead_elements);
    self->startup_phase = 0;
  }

  EstBufDelayExtended(self);

  {
    // |delay_diff_offset| gives us the option to manually rewind the delay on
    // very low delay platforms which can't be expressed purely through
    // |reported_delay_ms|.
    const int adjusted_known_delay =
        WEBRTC_SPL_MAX(0, self->knownDelay + delay_diff_offset);

    for (i = 0; i < num_frames; ++i) {
      WebRtcAec_ProcessFrame(self->aec, &near[FRAME_LEN * i],
          &near_high[FRAME_LEN * i], adjusted_known_delay,
          &out[FRAME_LEN * i], &out_high[FRAME_LEN * i]);
    }
  }
}
static void EstBufDelayNormal(aecpc_t* aecpc) {
  int nSampSndCard = aecpc->msInSndCardBuf * sampMsNb * aecpc->rate_factor;
  int current_delay = nSampSndCard - WebRtcAec_system_delay(aecpc->aec);
  int delay_difference = 0;
@ -732,6 +853,9 @@ static int EstBufDelay(aecpc_t* aecpc) {
    current_delay += WebRtcAec_MoveFarReadPtr(aecpc->aec, 1) * PART_LEN;
  }

  // We use -1 to signal an initialized state in the "extended" implementation;
  // compensate for that.
  aecpc->filtDelay = aecpc->filtDelay < 0 ? 0 : aecpc->filtDelay;
  aecpc->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.8 * aecpc->filtDelay +
      0.2 * current_delay));
@ -756,6 +880,58 @@ static int EstBufDelay(aecpc_t* aecpc) {
  if (aecpc->timeForDelayChange > 25) {
    aecpc->knownDelay = WEBRTC_SPL_MAX((int) aecpc->filtDelay - 160, 0);
  }
  return 0;
}
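The 0.8/0.2 update above is a first-order IIR (exponential) smoother over per-call delay estimates. A minimal sketch of how it tracks a step change, with a made-up target delay:

// Illustrative only: convergence of the 0.8/0.2 smoother.
#include <cstdio>

int main() {
  double filt_delay = 0.0;             // plays the role of aecpc->filtDelay
  const double current_delay = 800.0;  // hypothetical step, in samples
  for (int call = 1; call <= 10; ++call) {
    filt_delay = 0.8 * filt_delay + 0.2 * current_delay;
    printf("call %2d: filtDelay ~ %.0f\n", call, filt_delay);
  }
  // After 10 calls the estimate is within ~11% of the target; knownDelay
  // itself only moves once the difference has persisted for > 25 calls.
  return 0;
}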
static void EstBufDelayExtended(aecpc_t* self) {
  int reported_delay = self->msInSndCardBuf * sampMsNb * self->rate_factor;
  int current_delay = reported_delay - WebRtcAec_system_delay(self->aec);
  int delay_difference = 0;

  // Before we proceed with the delay estimate filtering we:
  // 1) Compensate for the frame that will be read.
  // 2) Compensate for drift resampling.
  // 3) Compensate for non-causality if needed, since the estimated delay can't
  //    be negative.

  // 1) Compensating for the frame(s) that will be read/processed.
  current_delay += FRAME_LEN * self->rate_factor;

  // 2) Account for resampling frame delay.
  if (self->skewMode == kAecTrue && self->resample == kAecTrue) {
    current_delay -= kResamplingDelay;
  }

  // 3) Compensate for non-causality, if needed, by flushing two blocks.
  if (current_delay < PART_LEN) {
    current_delay += WebRtcAec_MoveFarReadPtr(self->aec, 2) * PART_LEN;
  }

  if (self->filtDelay == -1) {
    self->filtDelay = WEBRTC_SPL_MAX(0, 0.5 * current_delay);
  } else {
    self->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.95 * self->filtDelay +
        0.05 * current_delay));
  }

  delay_difference = self->filtDelay - self->knownDelay;
  if (delay_difference > 384) {
    if (self->lastDelayDiff < 128) {
      self->timeForDelayChange = 0;
    } else {
      self->timeForDelayChange++;
    }
  } else if (delay_difference < 128 && self->knownDelay > 0) {
    if (self->lastDelayDiff > 384) {
      self->timeForDelayChange = 0;
    } else {
      self->timeForDelayChange++;
    }
  } else {
    self->timeForDelayChange = 0;
  }
  self->lastDelayDiff = delay_difference;

  if (self->timeForDelayChange > 25) {
    self->knownDelay = WEBRTC_SPL_MAX((int) self->filtDelay - 256, 0);
  }
}
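The 384/128 thresholds in EstBufDelayExtended implement hysteresis: the smoothed delay must stay far from |knownDelay| in the same direction for more than 25 consecutive calls before |knownDelay| is adjusted, which avoids forcing filter readaptation on transient glitches. A condensed sketch of that state machine (thresholds and back-off copied from the code above):

// Illustrative only: hysteresis around knownDelay updates.
struct DelayHysteresis {
  int known_delay = 0;
  int last_diff = 0;
  int time_for_change = 0;

  void Update(int filt_delay) {
    const int diff = filt_delay - known_delay;
    if (diff > 384) {
      // Count only if the previous gap pointed the same way.
      time_for_change = (last_diff < 128) ? 0 : time_for_change + 1;
    } else if (diff < 128 && known_delay > 0) {
      time_for_change = (last_diff > 384) ? 0 : time_for_change + 1;
    } else {
      time_for_change = 0;
    }
    last_diff = diff;
    if (time_for_change > 25)
      known_delay = (filt_delay - 256 > 0) ? filt_delay - 256 : 0;
  }
};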
@ -20,8 +20,6 @@ typedef struct {
    int splitSampFreq;
    int scSampFreq;
    float sampFactor; // scSampRate / sampFreq
    short autoOnOff;
    short activity;
    short skewMode;
    int bufSizeStart;
    int knownDelay;
@ -39,7 +37,7 @@ typedef struct {
    short msInSndCardBuf;
    short filtDelay; // Filtered delay estimate.
    int timeForDelayChange;
    int ECstartup;
    int startup_phase;
    int checkBuffSize;
    short lastDelayDiff;
@ -62,6 +60,8 @@ typedef struct {
    int lastError;

    int farend_started;

    AecCore* aec;
} aecpc_t;
@ -128,7 +128,7 @@ void SystemDelayTest::RunStableStartup() {
  for (; process_time_ms < kStableConvergenceMs; process_time_ms += 10) {
    RenderAndCapture(kDeviceBufMs);
    buffer_size += samples_per_frame_;
    if (self_->ECstartup == 0) {
    if (self_->startup_phase == 0) {
      // We have left the startup phase.
      break;
    }
@ -222,7 +222,7 @@ TEST_F(SystemDelayTest, CorrectDelayAfterUnstableStartup) {
    RenderAndCapture(reported_delay_ms);
    buffer_size += samples_per_frame_;
    buffer_offset_ms = -buffer_offset_ms;
    if (self_->ECstartup == 0) {
    if (self_->startup_phase == 0) {
      // We have left the startup phase.
      break;
    }
@ -268,7 +268,7 @@ TEST_F(SystemDelayTest, CorrectDelayAfterStableBufferBuildUp) {
  for (; process_time_ms <= kMaxConvergenceMs; process_time_ms += 10) {
    RenderAndCapture(kDeviceBufMs);
    buffer_size += samples_per_frame_;
    if (self_->ECstartup == 0) {
    if (self_->startup_phase == 0) {
      // We have left the startup phase.
      break;
    }
@ -155,6 +155,7 @@
            'aec/aec_rdft_sse2.c',
          ],
          'cflags': ['-msse2',],
          'cflags_mozilla': [ '-msse2', ],
          'xcode_settings': {
            'OTHER_CFLAGS': ['-msse2',],
          },
@ -178,11 +179,14 @@
          'dependencies': [
            'audio_processing_offsets',
          ],
          'sources': [
          #
          # We disable the ASM source, because our gyp->Makefile translator
          # does not support the build steps to get the asm offsets.
          'sources!': [
            'aecm/aecm_core_neon.S',
            'ns/nsx_core_neon.S',
          ],
          'sources!': [
          'sources': [
            'aecm/aecm_core_neon.c',
            'ns/nsx_core_neon.c',
          ],
@ -13,12 +13,14 @@
#include <assert.h>
#include <string.h>

extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/audio_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"

#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"

namespace webrtc {

typedef void Handle;
@ -69,7 +71,8 @@ EchoCancellationImpl::EchoCancellationImpl(const AudioProcessingImpl* apm)
      stream_drift_samples_(0),
      was_stream_drift_set_(false),
      stream_has_echo_(false),
      delay_logging_enabled_(false) {}
      delay_logging_enabled_(false),
      delay_correction_enabled_(true) {}

EchoCancellationImpl::~EchoCancellationImpl() {}

@ -338,6 +341,13 @@ int EchoCancellationImpl::Initialize() {
  return apm_->kNoError;
}

#if 0
void EchoCancellationImpl::SetExtraOptions(const Config& config) {
  delay_correction_enabled_ = config.Get<DelayCorrection>().enabled;
  Configure();
}
#endif

void* EchoCancellationImpl::CreateHandle() const {
  Handle* handle = NULL;
  if (WebRtcAec_Create(&handle) != apm_->kNoError) {
@ -369,6 +379,8 @@ int EchoCancellationImpl::ConfigureHandle(void* handle) const {
  config.skewMode = drift_compensation_enabled_;
  config.delay_logging = delay_logging_enabled_;

  WebRtcAec_enable_delay_correction(WebRtcAec_aec_core(
      static_cast<Handle*>(handle)), delay_correction_enabled_ ? 1 : 0);
  return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
}
@ -14,6 +14,29 @@
#include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h"

namespace webrtc {

// Use to enable the delay correction feature. This now engages an extended
// filter mode in the AEC, along with robustness measures around the reported
// system delays. It comes with a significant increase in AEC complexity, but is
// much more robust to unreliable reported delays.
//
// Detailed changes to the algorithm:
// - The filter length is changed from 48 to 128 ms. This comes with tuning of
//   several parameters: i) filter adaptation stepsize and error threshold;
//   ii) non-linear processing smoothing and overdrive.
// - Option to ignore the reported delays on platforms which we deem
//   sufficiently unreliable. See WEBRTC_UNTRUSTED_DELAY in echo_cancellation.c.
// - Faster startup times by removing the excessive "startup phase" processing
//   of reported delays.
// - Much more conservative adjustments to the far-end read pointer. We smooth
//   the delay difference more heavily, and back off from the difference more.
//   Adjustments force a readaptation of the filter, so they should be avoided
//   except when really necessary.
struct DelayCorrection {
  DelayCorrection() : enabled(false) {}
  DelayCorrection(bool enabled) : enabled(enabled) {}

  bool enabled;
};
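For reference, enabling the option from the APM side looks like the snippet below; it mirrors the unit test added later in this patch (note that SetExtraOptions is currently compiled out with #if 0 in this tree):

// Sketch: turning on delay correction through the Config mechanism.
webrtc::Config config;
config.Set<webrtc::DelayCorrection>(new webrtc::DelayCorrection(true));
apm->SetExtraOptions(config);  // apm: a created webrtc::AudioProcessing*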
class AudioProcessingImpl;
class AudioBuffer;
@ -34,6 +57,7 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper {

  // ProcessingComponent implementation.
  virtual int Initialize() OVERRIDE;
  // virtual void SetExtraOptions(const Config& config) OVERRIDE;

 private:
  // EchoCancellation implementation.
@ -70,6 +94,7 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper {
  bool was_stream_drift_set_;
  bool stream_has_echo_;
  bool delay_logging_enabled_;
  bool delay_correction_enabled_;
};

}  // namespace webrtc
@ -0,0 +1,51 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "testing/gtest/include/gtest/gtest.h"
extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/echo_cancellation_impl.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

namespace webrtc {

TEST(EchoCancellationInternalTest, DelayCorrection) {
  scoped_ptr<AudioProcessing> ap(AudioProcessing::Create(0));
  EXPECT_TRUE(ap->echo_cancellation()->aec_core() == NULL);

  EXPECT_EQ(ap->kNoError, ap->echo_cancellation()->Enable(true));
  EXPECT_TRUE(ap->echo_cancellation()->is_enabled());

  AecCore* aec_core = ap->echo_cancellation()->aec_core();
  ASSERT_TRUE(aec_core != NULL);
  // Disabled by default.
  EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core));

  Config config;
  config.Set<DelayCorrection>(new DelayCorrection(true));
  ap->SetExtraOptions(config);
  EXPECT_EQ(1, WebRtcAec_delay_correction_enabled(aec_core));

  // Retains setting after initialization.
  EXPECT_EQ(ap->kNoError, ap->Initialize());
  EXPECT_EQ(1, WebRtcAec_delay_correction_enabled(aec_core));

  config.Set<DelayCorrection>(new DelayCorrection(false));
  ap->SetExtraOptions(config);
  EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core));

  // Retains setting after initialization.
  EXPECT_EQ(ap->kNoError, ap->Initialize());
  EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core));
}

}  // namespace webrtc
@ -609,13 +609,13 @@ int32_t ModuleFileUtility::ReadWavHeader(InStream& wav)
    // special cases?
    if(_wavFormatObj.nSamplesPerSec == 44100)
    {
        _readSizeBytes = 440 * _wavFormatObj.nChannels *
        _readSizeBytes = 441 * _wavFormatObj.nChannels *
            (_wavFormatObj.nBitsPerSample / 8);
    } else if(_wavFormatObj.nSamplesPerSec == 22050) {
        _readSizeBytes = 220 * _wavFormatObj.nChannels *
        _readSizeBytes = 220 * _wavFormatObj.nChannels * // XXX inexact!
            (_wavFormatObj.nBitsPerSample / 8);
    } else if(_wavFormatObj.nSamplesPerSec == 11025) {
        _readSizeBytes = 110 * _wavFormatObj.nChannels *
        _readSizeBytes = 110 * _wavFormatObj.nChannels * // XXX inexact!
            (_wavFormatObj.nBitsPerSample / 8);
    } else {
        _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
@ -677,22 +677,22 @@ int32_t ModuleFileUtility::InitWavCodec(uint32_t samplesPerSec,
    {
        strcpy(codec_info_.plname, "L16");
        _codecId = kCodecL16_16kHz;
        codec_info_.pacsize = 110;
        codec_info_.pacsize = 110; // XXX inexact!
        codec_info_.plfreq = 11000;
        codec_info_.plfreq = 11000; // XXX inexact!
    }
    else if(samplesPerSec == 22050)
    {
        strcpy(codec_info_.plname, "L16");
        _codecId = kCodecL16_16kHz;
        codec_info_.pacsize = 220;
        codec_info_.pacsize = 220; // XXX inexact!
        codec_info_.plfreq = 22000;
        codec_info_.plfreq = 22000; // XXX inexact!
    }
    else if(samplesPerSec == 44100)
    {
        strcpy(codec_info_.plname, "L16");
        _codecId = kCodecL16_16kHz;
        codec_info_.pacsize = 440;
        codec_info_.pacsize = 441;
        codec_info_.plfreq = 44000;
        codec_info_.plfreq = 44100;
    }
    else if(samplesPerSec == 48000)
    {
@ -1125,8 +1125,6 @@ int32_t ModuleFileUtility::WriteWavHeader(
{

    // Frame size in bytes for 10 ms of audio.
    // TODO (hellner): 44.1 kHz has 440 samples frame size. Doesn't seem to
    // be taken into consideration here!
    int32_t frameSize = (freq / 100) * bytesPerSample * channels;

    // Calculate the number of full frames that the wave file contain.
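The 440 to 441 fix follows directly from the 10 ms framing: a 10 ms frame holds rate / 100 samples, which is exact at 44100 Hz but fractional at 22050 and 11025 Hz (hence the XXX markers left on those branches). A quick check:

// Illustrative only: samples per 10 ms frame at the special-cased rates.
#include <cstdio>

int main() {
  const int rates[] = { 44100, 22050, 11025 };
  for (int rate : rates) {
    // Integer division truncates the fractional samples.
    printf("%5d Hz: %3d samples per 10 ms (exact: %.2f)\n",
           rate, rate / 100, rate / 100.0);
  }
  return 0;  // 441 exact; 220 vs 220.50; 110 vs 110.25
}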
@ -145,6 +145,7 @@
    'audio_coding/neteq4/mock/mock_payload_splitter.h',
    'audio_processing/aec/system_delay_unittest.cc',
    'audio_processing/aec/echo_cancellation_unittest.cc',
    'audio_processing/echo_cancellation_impl_unittest.cc',
    'audio_processing/test/audio_processing_unittest.cc',
    'audio_processing/utility/delay_estimator_unittest.cc',
    'audio_processing/utility/ring_buffer_unittest.cc',
@ -11,6 +11,7 @@
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"

#include <assert.h>
#include <cstdlib> // for abs()
#include <string.h>

#include <algorithm>
@ -21,7 +21,7 @@
#include <WinSock.h> // timeval

#include <MMSystem.h> // timeGetTime
#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_BSD) || (defined WEBRTC_MAC))
#include <sys/time.h> // gettimeofday
#include <time.h>
#endif
@ -118,7 +118,7 @@ bool StringCompare(const char* str1, const char* str2,
                   const uint32_t length) {
  return (_strnicmp(str1, str2, length) == 0) ? true : false;
}
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
bool StringCompare(const char* str1, const char* str2,
                   const uint32_t length) {
  return (strncasecmp(str1, str2, length) == 0) ? true : false;
@ -87,7 +87,7 @@ int32_t FilePlayerImpl::Frequency() const
    {
        return 32000;
    }
    else if(_codec.plfreq == 44000)
    else if(_codec.plfreq == 44100 || _codec.plfreq == 44000 ) // XXX just 44100?
    {
        return 32000;
    }
@ -19,7 +19,7 @@
#if defined(_WIN32)
#include <Windows.h>
#include <mmsystem.h>
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) || defined(WEBRTC_BSD)
#include <string.h>
#include <sys/time.h>
#include <time.h>
@ -237,7 +237,7 @@ inline uint32_t RtpDumpImpl::GetTimeInMS() const
{
#if defined(_WIN32)
    return timeGetTime();
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
    struct timeval tv;
    struct timezone tz;
    unsigned long val;
@ -16,6 +16,8 @@
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"

#include "AndroidJNIWrapper.h"

namespace webrtc
{

@ -52,17 +54,14 @@ DeviceInfoAndroid::~DeviceInfoAndroid() {
}

uint32_t DeviceInfoAndroid::NumberOfDevices() {
  JNIEnv *env;
  AutoLocalJNIFrame jniFrame;
  jclass javaCmDevInfoClass;
  JNIEnv* env = jniFrame.GetEnv();
  jobject javaCmDevInfoObject;
  if (!env)
  bool attached = false;
  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
          env,
          javaCmDevInfoClass,
          javaCmDevInfoObject,
          attached) != 0)
    return 0;

  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();

  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
               "%s GetMethodId", __FUNCTION__);
  // get the method ID for the Android Java GetDeviceUniqueName name.
@ -76,7 +75,6 @@ uint32_t DeviceInfoAndroid::NumberOfDevices() {
                 "%s Calling Number of devices", __FUNCTION__);
    numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
  }
  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);

  if (numberOfDevices > 0)
    return numberOfDevices;
@ -92,18 +90,15 @@ int32_t DeviceInfoAndroid::GetDeviceName(
    char* /*productUniqueIdUTF8*/,
    uint32_t /*productUniqueIdUTF8Length*/) {

  JNIEnv *env;
  jclass javaCmDevInfoClass;
  jobject javaCmDevInfoObject;
  int32_t result = 0;
  bool attached = false;
  AutoLocalJNIFrame jniFrame;
  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
  JNIEnv* env = jniFrame.GetEnv();
          env,
  if (!env)
          javaCmDevInfoClass,
          javaCmDevInfoObject,
          attached)!= 0)
    return -1;

  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();

  // get the method ID for the Android Java GetDeviceUniqueName name.
  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
                                   "(I)Ljava/lang/String;");
@ -151,8 +146,6 @@ int32_t DeviceInfoAndroid::GetDeviceName(
    result = -1;
  }

  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);

  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
               "%s: result %d", __FUNCTION__, (int) result);
  return result;
@ -168,21 +161,17 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
    delete it->second;
  _captureCapabilities.clear();

  JNIEnv *env;
  AutoLocalJNIFrame jniFrame;
  jclass javaCmDevInfoClass;
  JNIEnv* env = jniFrame.GetEnv();
  jobject javaCmDevInfoObject;
  if (!env)
  bool attached = false;
  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
          env,
          javaCmDevInfoClass,
          javaCmDevInfoObject,
          attached) != 0)
    return -1;

  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();

  // Find the capability class
  jclass javaCapClass = g_capabilityClass;
  jclass javaCapClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureCapabilityClass);
  if (javaCapClass == NULL) {
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                 "%s: SetAndroidCaptureClasses must be called first!",
                 __FUNCTION__);
@ -195,7 +184,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
      "GetCapabilityArray",
      "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
  if (cid == NULL) {
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                 "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
    return -1;
@ -204,7 +192,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
  jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);

  if (capureIdString == NULL) {
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                 "%s: Can't create string for method GetCapabilityArray.",
                 __FUNCTION__);
@ -214,7 +201,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
  jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
                                                      cid, capureIdString);
  if (!javaCapabilitiesObj) {
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                 "%s: Failed to call java GetCapabilityArray.",
                 __FUNCTION__);
@ -225,7 +211,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
  jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I");
  jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I");
  if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                 "%s: Failed to get field Id.", __FUNCTION__);
    return -1;
@ -258,7 +243,8 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
          deviceUniqueIdUTF8,
          _lastUsedDeviceNameLength + 1);

  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
  env->DeleteGlobalRef(javaCapClass);

  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
               "CreateCapabilityMap %d", _captureCapabilities.size());
@ -268,22 +254,18 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
int32_t DeviceInfoAndroid::GetOrientation(
    const char* deviceUniqueIdUTF8,
    VideoCaptureRotation& orientation) {
  JNIEnv *env;
  AutoLocalJNIFrame jniFrame;
  jclass javaCmDevInfoClass;
  JNIEnv* env = jniFrame.GetEnv();
  jobject javaCmDevInfoObject;
  if (!env)
  bool attached = false;
  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
          env,
          javaCmDevInfoClass,
          javaCmDevInfoObject,
          attached) != 0)
    return -1;

  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();

  // get the method ID for the Android Java GetOrientation .
  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
                                   "(Ljava/lang/String;)I");
  if (cid == NULL) {
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                 "%s: Can't find method GetOrientation.", __FUNCTION__);
    return -1;
@ -291,7 +273,6 @@ int32_t DeviceInfoAndroid::GetOrientation(
  // Create a jstring so we can pass the deviceUniquName to the java method.
  jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
  if (capureIdString == NULL) {
    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                 "%s: Can't create string for method GetCapabilityArray.",
                 __FUNCTION__);
@ -300,7 +281,6 @@ int32_t DeviceInfoAndroid::GetOrientation(
  // Call the java class and get the orientation.
  jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
                                         capureIdString);
  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);

  int32_t retValue = 0;
  switch (jorientation) {
@ -16,6 +16,9 @@
#include "webrtc/modules/video_capture/device_info_impl.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"

#define AndroidJavaCaptureDeviceInfoClass "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"
#define AndroidJavaCaptureCapabilityClass "org/webrtc/videoengine/CaptureCapabilityAndroid"

namespace webrtc
{
namespace videocapturemodule
@ -25,14 +25,25 @@ import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.TextureView.SurfaceTextureListener;
import android.view.View;

import org.mozilla.gecko.GeckoApp;
import org.mozilla.gecko.GeckoAppShell;
import org.mozilla.gecko.GeckoAppShell.AppStateListener;
import org.mozilla.gecko.util.ThreadUtils;

public class VideoCaptureAndroid implements PreviewCallback, Callback {

    private final static String TAG = "WEBRTC-JC";

    private Camera camera;
    private int cameraId;
    private AndroidVideoCaptureDevice currentDevice = null;
    public ReentrantLock previewBufferLock = new ReentrantLock();
    // This lock takes sync with StartCapture and SurfaceChanged
@ -43,6 +54,9 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
    private boolean isCaptureStarted = false;
    private boolean isCaptureRunning = false;
    private boolean isSurfaceReady = false;
    private SurfaceHolder surfaceHolder = null;
    private SurfaceTexture surfaceTexture = null;
    private SurfaceTexture dummySurfaceTexture = null;

    private final int numCaptureBuffers = 3;
    private int expectedFrameSize = 0;
@ -51,7 +65,6 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
    // C++ callback context variable.
    private long context = 0;
    private SurfaceHolder localPreview = null;
    private SurfaceTexture dummySurfaceTexture = null;
    // True if this class owns the preview video buffers.
    private boolean ownsBuffers = false;
@ -59,25 +72,171 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
|
|||||||
private int mCaptureHeight = -1;
|
private int mCaptureHeight = -1;
|
||||||
private int mCaptureFPS = -1;
|
private int mCaptureFPS = -1;
|
||||||
|
|
||||||
|
private int mCaptureRotation = 0;
|
||||||
|
|
||||||
|
private AppStateListener mAppStateListener = null;
|
||||||
|
|
||||||
|
public class MySurfaceTextureListener implements TextureView.SurfaceTextureListener {
|
||||||
|
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
|
||||||
|
Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureAvailable");
|
||||||
|
|
||||||
|
captureLock.lock();
|
||||||
|
isSurfaceReady = true;
|
||||||
|
surfaceTexture = surface;
|
||||||
|
|
||||||
|
tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
|
||||||
|
captureLock.unlock();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void onSurfaceTextureSizeChanged(SurfaceTexture surface,
|
||||||
|
int width, int height) {
|
||||||
|
// Ignored, Camera does all the work for us
|
||||||
|
// Note that for a TextureView we start on onSurfaceTextureAvailable,
|
||||||
|
// for a SurfaceView we start on surfaceChanged. TextureView
|
||||||
|
// will not give out an onSurfaceTextureSizeChanged during creation.
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
|
||||||
|
Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureDestroyed");
|
||||||
|
isSurfaceReady = false;
|
||||||
|
DetachCamera();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
|
||||||
|
// Invoked every time there's a new Camera preview frame
|
||||||
|
}
|
||||||
|
}
|
||||||
public static
|
public static
|
||||||
void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
|
void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
|
||||||
Log.d(TAG, "DeleteVideoCaptureAndroid");
|
Log.d(TAG, "DeleteVideoCaptureAndroid");
|
||||||
if (captureAndroid.camera == null) {
|
|
||||||
return;
|
GeckoAppShell.getGeckoInterface().removeAppStateListener(captureAndroid.mAppStateListener);
|
||||||
}
|
|
||||||
|
|
||||||
captureAndroid.StopCapture();
|
captureAndroid.StopCapture();
|
||||||
|
if (captureAndroid.camera != null) {
|
||||||
captureAndroid.camera.release();
|
captureAndroid.camera.release();
|
||||||
captureAndroid.camera = null;
|
captureAndroid.camera = null;
|
||||||
|
}
|
||||||
captureAndroid.context = 0;
|
captureAndroid.context = 0;
|
||||||
|
|
||||||
|
View cameraView = GeckoAppShell.getGeckoInterface().getCameraView();
|
||||||
|
if (cameraView instanceof SurfaceView) {
|
||||||
|
((SurfaceView)cameraView).getHolder().removeCallback(captureAndroid);
|
||||||
|
} else if (cameraView instanceof TextureView) {
|
||||||
|
// No need to explicitly remove the Listener:
|
||||||
|
// i.e. ((SurfaceView)cameraView).setSurfaceTextureListener(null);
|
||||||
|
}
|
||||||
|
ThreadUtils.getUiHandler().post(new Runnable() {
|
||||||
|
@Override
|
||||||
|
public void run() {
|
||||||
|
try {
|
||||||
|
GeckoAppShell.getGeckoInterface().disableCameraView();
|
||||||
|
} catch (Exception e) {
|
||||||
|
Log.e(TAG,
|
||||||
|
"VideoCaptureAndroid disableCameraView exception: " +
|
||||||
|
e.getLocalizedMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
public VideoCaptureAndroid(int in_id, long in_context, Camera in_camera,
AndroidVideoCaptureDevice in_device) {
AndroidVideoCaptureDevice in_device,
int in_cameraId) {
id = in_id;
context = in_context;
camera = in_camera;
cameraId = in_cameraId;
currentDevice = in_device;
mCaptureRotation = GetRotateAmount();

try {
View cameraView = GeckoAppShell.getGeckoInterface().getCameraView();
if (cameraView instanceof SurfaceView) {
((SurfaceView)cameraView).getHolder().addCallback(this);
} else if (cameraView instanceof TextureView) {
MySurfaceTextureListener listener = new MySurfaceTextureListener();
((TextureView)cameraView).setSurfaceTextureListener(listener);
}
ThreadUtils.getUiHandler().post(new Runnable() {
@Override
public void run() {
try {
GeckoAppShell.getGeckoInterface().enableCameraView();
} catch (Exception e) {
Log.e(TAG,
"VideoCaptureAndroid enableCameraView exception: "
+ e.getLocalizedMessage());
}
}
});
} catch (Exception ex) {
Log.e(TAG, "VideoCaptureAndroid constructor exception: " +
ex.getLocalizedMessage());
}

mAppStateListener = new AppStateListener() {
@Override
public void onPause() {
StopCapture();
if (camera != null) {
camera.release();
camera = null;
}
}
@Override
public void onResume() {
try {
if(android.os.Build.VERSION.SDK_INT>8) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
} catch (Exception ex) {
Log.e(TAG, "Error reopening the camera: " + ex.getMessage());
}
captureLock.lock();
isCaptureStarted = true;
tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
captureLock.unlock();
}
@Override
public void onOrientationChanged() {
mCaptureRotation = GetRotateAmount();
}
};

GeckoAppShell.getGeckoInterface().addAppStateListener(mAppStateListener);
}

public int GetRotateAmount() {
int rotation = GeckoAppShell.getGeckoInterface().getActivity().getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
if(android.os.Build.VERSION.SDK_INT>8) {
android.hardware.Camera.CameraInfo info =
new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
} else {
// Assume 90deg orientation for Froyo devices.
// Only back-facing cameras are supported in Froyo.
int orientation = 90;
int result = (orientation - degrees + 360) % 360;
return result;
}
}
}
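GetRotateAmount() above folds the display rotation into the camera sensor's mounting orientation, with opposite signs for front and back sensors because front cameras deliver mirrored frames. A minimal C++ sketch of the same arithmetic, assuming only the two inputs shown (the helper name is invented for illustration):

// Hypothetical sketch of GetRotateAmount()'s arithmetic: given the sensor's
// mounting orientation and the current display rotation (both in degrees),
// compute how much each captured frame must be rotated.
int ComputeCaptureRotation(int sensorOrientation, int displayDegrees,
                           bool frontFacing) {
  if (frontFacing) {
    // Front cameras are mirrored; the display rotation adds up.
    return (sensorOrientation + displayDegrees) % 360;
  }
  // Back cameras compensate in the opposite direction; +360 keeps the
  // value non-negative before the modulo.
  return (sensorOrientation - displayDegrees + 360) % 360;
}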

private int tryStartCapture(int width, int height, int frameRate) {
@ -86,16 +245,34 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
return -1;
}

Log.d(TAG, "tryStartCapture: " + width +
Log.d(TAG, "tryStartCapture " + width +
"x" + height +", frameRate: " + frameRate +
" height " + height +" frame rate " + frameRate +
", isCaptureRunning: " + isCaptureRunning +
" isCaptureRunning " + isCaptureRunning +
", isSurfaceReady: " + isSurfaceReady +
" isSurfaceReady " + isSurfaceReady +
", isCaptureStarted: " + isCaptureStarted);
" isCaptureStarted " + isCaptureStarted);

if (isCaptureRunning || !isCaptureStarted) {
if (isCaptureRunning || !isSurfaceReady || !isCaptureStarted) {
return 0;
}

try {
if (surfaceHolder != null)
camera.setPreviewDisplay(surfaceHolder);
if (surfaceTexture != null)
camera.setPreviewTexture(surfaceTexture);
if (surfaceHolder == null && surfaceTexture == null) {
// No local renderer. Camera won't capture without
// setPreview{Texture,Display}, so we create a dummy SurfaceTexture
// and hand it over to Camera, but never listen for frame-ready
// callbacks, and never call updateTexImage on it.
try {
dummySurfaceTexture = new SurfaceTexture(42);
camera.setPreviewTexture(dummySurfaceTexture);
} catch (IOException e) {
throw new RuntimeException(e);
}
}

CaptureCapabilityAndroid currentCapability =
new CaptureCapabilityAndroid();
currentCapability.width = width;
@ -103,17 +280,13 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
currentCapability.maxFPS = frameRate;
PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);

Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(currentCapability.width,
currentCapability.height);
parameters.setPreviewFormat(PIXEL_FORMAT);
parameters.setPreviewFrameRate(currentCapability.maxFPS);
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e(TAG, "setParameters failed", e);
return -1;
}

int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
byte[] buffer = null;
@ -130,35 +303,19 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
isCaptureRunning = true;
previewBufferLock.unlock();

}
catch (Exception ex) {
Log.e(TAG, "Failed to start camera: " + ex.getMessage());
return -1;
}

isCaptureRunning = true;
return 0;
}

public int StartCapture(int width, int height, int frameRate) {
Log.d(TAG, "StartCapture width " + width +
" height " + height +" frame rate " + frameRate);
// Get the local preview SurfaceHolder from the static render class
localPreview = ViERenderer.GetLocalRenderer();
if (localPreview != null) {
if (localPreview.getSurface() != null &&
localPreview.getSurface().isValid()) {
surfaceCreated(localPreview);
}
localPreview.addCallback(this);
} else {
// No local renderer. Camera won't capture without
// setPreview{Texture,Display}, so we create a dummy SurfaceTexture
// and hand it over to Camera, but never listen for frame-ready
// callbacks, and never call updateTexImage on it.
captureLock.lock();
try {
dummySurfaceTexture = new SurfaceTexture(42);
camera.setPreviewTexture(dummySurfaceTexture);
} catch (IOException e) {
throw new RuntimeException(e);
}
captureLock.unlock();
}

captureLock.lock();
isCaptureStarted = true;
mCaptureWidth = width;
@ -171,36 +328,48 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
return res;
}

public int StopCapture() {
public int DetachCamera() {
Log.d(TAG, "StopCapture");
try {
previewBufferLock.lock();
isCaptureRunning = false;
previewBufferLock.unlock();
camera.stopPreview();
if (camera != null) {
camera.setPreviewCallbackWithBuffer(null);
} catch (RuntimeException e) {
camera.stopPreview();
Log.e(TAG, "Failed to stop camera", e);
}
} catch (Exception ex) {
Log.e(TAG, "Failed to stop camera: " + ex.getMessage());
return -1;
}

isCaptureStarted = false;
return 0;
}

native void ProvideCameraFrame(byte[] data, int length, long captureObject);
public int StopCapture() {
Log.d(TAG, "StopCapture");
isCaptureStarted = false;
return DetachCamera();
}

native void ProvideCameraFrame(byte[] data, int length, int rotation,
long captureObject);

public void onPreviewFrame(byte[] data, Camera camera) {
previewBufferLock.lock();

String dataLengthStr = "does not exist";
if(data != null) {
dataLengthStr = Integer.toString(data.length);
}

// The following line is for debug only
// Log.v(TAG, "preview frame length " + data.length +
Log.v(TAG, "preview frame length " + data.length +
// " context" + context);
" context" + context);
if (isCaptureRunning) {
// If StartCapture has been called but not StopCapture
// Call the C++ layer with the captured frame
if (data.length == expectedFrameSize) {
if (data != null && data.length == expectedFrameSize) {
ProvideCameraFrame(data, expectedFrameSize, context);
ProvideCameraFrame(data, expectedFrameSize, mCaptureRotation,
context);
if (ownsBuffers) {
// Give the video buffer to the camera service again.
camera.addCallbackBuffer(data);
@ -210,58 +379,26 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
previewBufferLock.unlock();
}

// Sets the rotation of the preview render window.
// Does not affect the captured video image.
public void SetPreviewRotation(int rotation) {
Log.v(TAG, "SetPreviewRotation:" + rotation);

if (camera == null) {
return;
}

int resultRotation = 0;
if (currentDevice.frontCameraType ==
VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
// this is a 2.3 or later front facing camera.
// SetDisplayOrientation will flip the image horizontally
// before doing the rotation.
resultRotation = ( 360 - rotation ) % 360; // compensate the mirror
}
else {
// Back facing or 2.2 or previous front camera
resultRotation = rotation;
}
camera.setDisplayOrientation(resultRotation);
}

public void surfaceChanged(SurfaceHolder holder,
int format, int width, int height) {
Log.d(TAG, "VideoCaptureAndroid::surfaceChanged");

captureLock.lock();
isSurfaceReady = true;
surfaceHolder = holder;

tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
captureLock.unlock();
return;
}

public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
captureLock.lock();
try {
if (camera != null) {
camera.setPreviewDisplay(holder);
}
} catch (IOException e) {
Log.e(TAG, "Failed to set preview surface!", e);
}
captureLock.unlock();
}

public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
captureLock.lock();
isSurfaceReady = false;
try {
DetachCamera();
if (camera != null) {
camera.setPreviewDisplay(null);
}
} catch (IOException e) {
Log.e(TAG, "Failed to clear preview surface!", e);
}
captureLock.unlock();
}
}

@ -252,6 +252,7 @@ public class VideoCaptureDeviceInfoAndroid {
Log.d(TAG, "AllocateCamera " + deviceUniqueId);

Camera camera = null;
int cameraId = 0;
AndroidVideoCaptureDevice deviceToUse = null;
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
@ -266,10 +267,12 @@ public class VideoCaptureDeviceInfoAndroid {
break;
default:
// From Android 2.3 and onwards
if(android.os.Build.VERSION.SDK_INT>8)
if(android.os.Build.VERSION.SDK_INT>8) {
cameraId = device.index;
camera = Camera.open(device.index);
else
} else {
camera=Camera.open(); // Default camera
camera = Camera.open(); // Default camera
}
}
}
}
@ -279,7 +282,7 @@ public class VideoCaptureDeviceInfoAndroid {
}
Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid");

return new VideoCaptureAndroid(id, context, camera, deviceToUse);
return new VideoCaptureAndroid(id, context, camera, deviceToUse, cameraId);
} catch (NoSuchMethodException e) {
Log.e(TAG, "AllocateCamera Failed to open camera", e);
} catch (ClassNotFoundException e) {
@ -16,6 +16,9 @@
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"

#include "AndroidJNIWrapper.h"
#include "mozilla/Assertions.h"

namespace webrtc
{
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
@ -45,12 +48,19 @@ VideoCaptureModule* VideoCaptureImpl::Create(
return implementation;
}

#ifdef DEBUG
// Android logging, uncomment to print trace to
// logcat instead of trace file/callback
// #include <android/log.h>
#include <android/log.h>
// #undef WEBRTC_TRACE
// #define WEBRTC_TRACE(a,b,c,...)
// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
// Some functions are called before the WebRTC logging can be brought up,
// log those to the Android log.
#define EARLY_WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC-VCA", __VA_ARGS__)
#else
#define EARLY_WEBRTC_TRACE(a,b,c,...)
#endif

JavaVM* VideoCaptureAndroid::g_jvm = NULL;
//VideoCaptureAndroid.java
@ -59,7 +69,6 @@ jclass VideoCaptureAndroid::g_javaCmClass = NULL;
jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
//static instance of VideoCaptureDeviceInfoAndroid.java
jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
jobject VideoCaptureAndroid::g_javaContext = NULL;

/*
* Register references to Java Capture class.
@ -67,87 +76,59 @@ jobject VideoCaptureAndroid::g_javaContext = NULL;
int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
void* javaContext) {

MOZ_ASSERT(javaVM != nullptr || g_javaCmDevInfoClass != nullptr);
EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: running", __FUNCTION__);

g_jvm = static_cast<JavaVM*> (javaVM);
g_javaContext = static_cast<jobject> (javaContext);

if (javaVM) {
// Already done? Exit early.
if (g_javaCmClass != NULL
&& g_javaCmDevInfoClass != NULL
&& g_javaCmDevInfoObject != NULL) {
EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: early exit", __FUNCTION__);
return 0;
}

JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get Java environment", __FUNCTION__);
return -1;
}
// get java capture class type (note path to class packet)
jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass);
g_javaCmClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureClass);
if (!javaCmClassLocal) {
if (!g_javaCmClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
// create a global reference to the class
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmClass = static_cast<jclass>
(env->NewGlobalRef(javaCmClassLocal));
if (!g_javaCmClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: InitVideoEngineJava(): could not create"
" Java Camera class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaCmClassLocal);
JNINativeMethod nativeFunctions =
{ "ProvideCameraFrame", "([BIJ)V",
{ "ProvideCameraFrame", "([BIIJ)V",
(void*) &VideoCaptureAndroid::ProvideCameraFrame };
if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Registered native functions", __FUNCTION__);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to register native functions",
__FUNCTION__);
return -1;
}
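Note the registration descriptor grows from ([BIJ)V to ([BIIJ)V: the JNI type codes read byte[], int, int, long returning void, matching the new jint rotation argument. A hedged sketch of how the descriptor and the native signature must line up (names shortened for illustration):

// The registered descriptor must match the native function's arguments:
//   [B = jbyteArray, I = jint (length), I = jint (rotation), J = jlong.
static void JNICALL ProvideCameraFrame(JNIEnv* env, jobject,
                                       jbyteArray frame, jint length,
                                       jint rotation, jlong context);

static const JNINativeMethod kNatives[] = {
  { "ProvideCameraFrame", "([BIIJ)V", (void*) &ProvideCameraFrame },
};
// env->RegisterNatives(clazz, kNatives, 1) binds the Java declaration
// "native void ProvideCameraFrame(byte[], int, int, long)" to this code.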

jclass capabilityClassLocal = env->FindClass(
"org/webrtc/videoengine/CaptureCapabilityAndroid");
if (!capabilityClassLocal) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
jclass capabilityClassGlobal = reinterpret_cast<jclass>(env->NewGlobalRef(
capabilityClassLocal));
DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal);

// get java capture class type (note path to class packet)
jclass javaCmDevInfoClassLocal = env->FindClass(
g_javaCmDevInfoClass = jsjni_GetGlobalClassRef(
"org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
AndroidJavaCaptureDeviceInfoClass);
if (!javaCmDevInfoClassLocal) {
if (!g_javaCmDevInfoClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}

// create a global reference to the class
EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmDevInfoClass = static_cast<jclass>
(env->NewGlobalRef(javaCmDevInfoClassLocal));
if (!g_javaCmDevInfoClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: InitVideoEngineJava(): could not create Java "
"Camera Device info class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaCmDevInfoClassLocal);

WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"VideoCaptureDeviceInfoAndroid get method id");

// get the method ID for the Android Java CaptureClass static
@ -158,23 +139,23 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
"(ILandroid/content/Context;)"
"Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get java"
"VideoCaptureDeviceInfoAndroid constructor ID",
__FUNCTION__);
return -1;
}

WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: construct static java device object", __FUNCTION__);

// construct the object by calling the static constructor object
jobject javaCameraDeviceInfoObjLocal =
env->CallStaticObjectMethod(g_javaCmDevInfoClass,
cid, (int) -1,
g_javaContext);
javaContext);
if (!javaCameraDeviceInfoObjLocal) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: could not create Java Capture Device info object",
__FUNCTION__);
return -1;
@ -183,7 +164,7 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
// we are referencing it after this function has returned)
g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
if (!g_javaCmDevInfoObject) {
WEBRTC_TRACE(webrtc::kTraceError,
EARLY_WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceAudioDevice,
-1,
"%s: could not create Java"
@ -193,13 +174,16 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
}
// Delete local object ref, we only use the global ref
env->DeleteLocalRef(javaCameraDeviceInfoObjLocal);

EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: success", __FUNCTION__);
return 0;
}
else {
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: JVM is NULL, assuming deinit", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: SetAndroidObjects not called with a valid JVM.",
__FUNCTION__);
return -1;
@ -211,7 +195,7 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
-1, "%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
@ -222,7 +206,7 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
env->DeleteGlobalRef(g_javaCmDevInfoClass);
env->DeleteGlobalRef(g_javaCmClass);
if (attached && g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
EARLY_WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
@ -232,47 +216,6 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
return 0;
}

int32_t VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached) {
// get the JNI env for this thread
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: SetAndroidObjects not called with a valid JVM.",
__FUNCTION__);
return -1;
}
attached = false;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
attached = true;
}
javaCmDevInfoClass = g_javaCmDevInfoClass;
javaCmDevInfoObject = g_javaCmDevInfoObject;
return 0;

}

int32_t VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
bool attached) {
if (attached && g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
return 0;
}

/*
* JNI callback from Java class. Called
* when the camera has a new frame to deliver
@ -284,11 +227,29 @@ void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
jobject,
jbyteArray javaCameraFrame,
jint length,
jint rotation,
jlong context) {
VideoCaptureAndroid* captureModule =
reinterpret_cast<VideoCaptureAndroid*>(context);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
-1, "%s: IncomingFrame %d", __FUNCTION__,length);

switch (rotation) {
case 90:
captureModule->SetCaptureRotation(kCameraRotate90);
break;
case 180:
captureModule->SetCaptureRotation(kCameraRotate180);
break;
case 270:
captureModule->SetCaptureRotation(kCameraRotate270);
break;
case 0:
default:
captureModule->SetCaptureRotation(kCameraRotate0);
break;
}

jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
captureModule->IncomingFrame((uint8_t*) cameraFrame,
length,captureModule->_frameInfo,0);
@ -338,33 +299,25 @@ int32_t VideoCaptureAndroid::Init(const int32_t id,
"%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
JNIEnv *env;
bool isAttached = false;

// get the JNI env for this thread
AutoLocalJNIFrame jniFrame;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
JNIEnv* env = jniFrame.GetEnv();
// try to attach the thread and get the env
if (!env)
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
}
jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();

int32_t rotation = 0;

WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"get method id");

// get the method ID for the Android Java
// CaptureDeviceInfoClass AllocateCamera factory method.
char signature[256];
sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);

jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "AllocateCamera",
signature);
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@ -374,7 +327,7 @@ int32_t VideoCaptureAndroid::Init(const int32_t id,

jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
// construct the object by calling the static constructor object
jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
jobject javaCameraObjLocal = env->CallObjectMethod(javaCmDevInfoObject,
cid, (jint) id,
(jlong) this,
capureIdString);
@ -394,17 +347,6 @@ int32_t VideoCaptureAndroid::Init(const int32_t id,
return -1;
}

// Delete local object ref, we only use the global ref
env->DeleteLocalRef(javaCameraObjLocal);

// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}

return 0;
}

@ -416,29 +358,15 @@ VideoCaptureAndroid::~VideoCaptureAndroid() {
"%s: Nothing to clean", __FUNCTION__);
}
else {
bool isAttached = false;
AutoLocalJNIFrame jniFrame;
// get the JNI env for this thread
JNIEnv* env = jniFrame.GetEnv();
JNIEnv *env;
if (!env)
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
return;
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}

// get the method ID for the Android Java CaptureClass static
// DeleteVideoCaptureAndroid method. Call this to release the camera so
// another application can use it.
jmethodID cid = env->GetStaticMethodID(
jmethodID cid = env->GetStaticMethodID(g_javaCmClass,
g_javaCmClass,
"DeleteVideoCaptureAndroid",
"(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
if (cid != NULL) {
@ -450,21 +378,11 @@ VideoCaptureAndroid::~VideoCaptureAndroid() {
// Delete global object ref to the camera.
env->DeleteGlobalRef(_javaCaptureObj);
_javaCaptureObj = NULL;
}
} else {
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find DeleteVideoCaptureAndroid id",
__FUNCTION__);
}

// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
_id, "%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}

@ -474,23 +392,13 @@ int32_t VideoCaptureAndroid::StartCapture(
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: ", __FUNCTION__);

bool isAttached = false;
int32_t result = 0;
// get the JNI env for this thread
int32_t rotation = 0;
JNIEnv *env;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
AutoLocalJNIFrame jniFrame;
// try to attach the thread and get the env
JNIEnv* env = jniFrame.GetEnv();
// Attach this thread to JVM
if (!env)
jint res = g_jvm->AttachCurrentThread(&env, NULL);
return -1;
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}

if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
_frameInfo) < 0) {
@ -522,13 +430,6 @@ int32_t VideoCaptureAndroid::StartCapture(
"%s: Failed to find StartCapture id", __FUNCTION__);
}

// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
if (result == 0) {
_requestedCapability = capability;
_captureStarted = true;
@ -543,23 +444,12 @@ int32_t VideoCaptureAndroid::StopCapture() {
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: ", __FUNCTION__);

bool isAttached = false;
int32_t result = 0;
// get the JNI env for this thread
JNIEnv *env = NULL;
AutoLocalJNIFrame jniFrame;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
JNIEnv* env = jniFrame.GetEnv();
// try to attach the thread and get the env
if (!env)
// Attach this thread to JVM
return -1;
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}

memset(&_requestedCapability, 0, sizeof(_requestedCapability));
memset(&_frameInfo, 0, sizeof(_frameInfo));
@ -577,13 +467,6 @@ int32_t VideoCaptureAndroid::StopCapture() {
"%s: Failed to find StopCapture id", __FUNCTION__);
}

// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
_captureStarted = false;

WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
@ -610,65 +493,7 @@ int32_t VideoCaptureAndroid::CaptureSettings(
int32_t VideoCaptureAndroid::SetCaptureRotation(
VideoCaptureRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) {
return VideoCaptureImpl::SetCaptureRotation(rotation);
if (!g_jvm)
return -1;

// get the JNI env for this thread
JNIEnv *env;
bool isAttached = false;

// get the JNI env for this thread
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}

jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation",
"(I)V");
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get java SetPreviewRotation ID",
__FUNCTION__);
return -1;
}
jint rotateFrame = 0;
switch (rotation) {
case kCameraRotate0:
rotateFrame = 0;
break;
case kCameraRotate90:
rotateFrame = 90;
break;
case kCameraRotate180:
rotateFrame = 180;
break;
case kCameraRotate270:
rotateFrame = 270;
break;
}
env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame);

// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
_id, "%s: Could not detach thread from JVM",
__FUNCTION__);
}
}

}
return 0;
}
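SetCaptureRotation() now simply delegates to VideoCaptureImpl: the preview-rotation round-trip into Java is gone, and rotation instead rides along with every frame via ProvideCameraFrame(), which maps degrees onto the capture enum as in the switch above. The same mapping as a standalone sketch (the helper name is invented for clarity):

// Sketch: map the per-frame rotation in degrees onto VideoCaptureRotation,
// mirroring the switch in ProvideCameraFrame above.
VideoCaptureRotation RotationFromDegrees(int degrees) {
  switch (degrees) {
    case 90:  return kCameraRotate90;
    case 180: return kCameraRotate180;
    case 270: return kCameraRotate270;
    default:  return kCameraRotate0;  // 0 or anything unexpected
  }
}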

} // namespace videocapturemodule
@ -12,6 +12,8 @@
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_

#include <jni.h>
#include <assert.h>
#include "trace.h"

#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
@ -24,17 +26,9 @@ namespace videocapturemodule {
class VideoCaptureAndroid : public VideoCaptureImpl {
public:
static int32_t SetAndroidObjects(void* javaVM, void* javaContext);
static int32_t AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached);
static int32_t ReleaseAndroidDeviceInfoObjects(bool attached);

VideoCaptureAndroid(const int32_t id);
virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);

virtual int32_t StartCapture(
const VideoCaptureCapability& capability);
virtual int32_t StopCapture();
@ -42,12 +36,16 @@ class VideoCaptureAndroid : public VideoCaptureImpl {
virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);

friend class AutoLocalJNIFrame;

protected:
virtual ~VideoCaptureAndroid();
static void JNICALL ProvideCameraFrame (JNIEnv * env,
jobject,
jbyteArray javaCameraFrame,
jint length, jlong context);
jint length,
jint rotation,
jlong context);
DeviceInfoAndroid _capInfo;
jobject _javaCaptureObj; // Java Camera object.
VideoCaptureCapability _frameInfo;
@ -58,7 +56,120 @@ class VideoCaptureAndroid : public VideoCaptureImpl {
static jclass g_javaCmDevInfoClass;
//Static java object implementing the needed device info functions;
static jobject g_javaCmDevInfoObject;
static jobject g_javaContext; // Java Application context
};

// Reworked version of what is available in AndroidBridge,
// can attach/detach in addition to push/pop frames.
class AutoLocalJNIFrame {
public:
AutoLocalJNIFrame(int nEntries = 128)
: mEntries(nEntries), mHasFrameBeenPushed(false), mAttached(false)
{
mJNIEnv = InitJNIEnv();
Push();
}

JNIEnv* GetEnv() {
return mJNIEnv;
}

jclass GetCmDevInfoClass() {
assert(VideoCaptureAndroid::g_javaCmDevInfoClass != nullptr);
return VideoCaptureAndroid::g_javaCmDevInfoClass;
}

jobject GetCmDevInfoObject() {
assert(VideoCaptureAndroid::g_javaCmDevInfoObject != nullptr);
return VideoCaptureAndroid::g_javaCmDevInfoObject;
}

bool CheckForException() {
if (mJNIEnv->ExceptionCheck()) {
mJNIEnv->ExceptionDescribe();
mJNIEnv->ExceptionClear();
return true;
}

return false;
}

~AutoLocalJNIFrame() {
if (!mJNIEnv)
return;

CheckForException();

if (mHasFrameBeenPushed)
mJNIEnv->PopLocalFrame(NULL);

if (mAttached) {
int res = VideoCaptureAndroid::g_jvm->DetachCurrentThread();
if (res < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: JVM Detach failed.", __FUNCTION__);
}
}
}

private:
void Push() {
if (!mJNIEnv)
return;

// Make sure there is enough space to store a local ref to the
// exception. I am not completely sure this is needed, but does
// not hurt.
jint ret = mJNIEnv->PushLocalFrame(mEntries + 1);
assert(ret == 0);
if (ret < 0)
CheckForException();
else
mHasFrameBeenPushed = true;
}

JNIEnv* InitJNIEnv()
{
JNIEnv* env = nullptr;

// Get the JNI env for this thread.
if (!VideoCaptureAndroid::g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: SetAndroidObjects not called with a valid JVM.",
__FUNCTION__);
return nullptr;
}

jint res = VideoCaptureAndroid::g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4);
if (res == JNI_EDETACHED) {
// Try to attach this thread to the JVM and get the env.
res = VideoCaptureAndroid::g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
// Attaching failed, error out.
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return nullptr;
}
mAttached = true;
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: attach success", __FUNCTION__);
} else if (res == JNI_OK) {
// Already attached, GetEnv succeeded.
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: did not attach because JVM Env already present",
__FUNCTION__);
} else {
// Non-recoverable error in GetEnv.
return nullptr;
}

return env;
}

int mEntries;
JNIEnv* mJNIEnv;
bool mHasFrameBeenPushed;
bool mAttached;
};

} // namespace videocapturemodule
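AutoLocalJNIFrame replaces the hand-written attach/detach and local-reference bookkeeping that the .cc functions above used to repeat. A hedged sketch of the calling pattern the rewritten Init/StartCapture/StopCapture follow (function name invented; error handling trimmed):

// Sketch: construct a frame, bail out if no JNIEnv, and let the destructor
// pop the local frame and detach the thread if it attached it.
int32_t SomeJniCall() {
  AutoLocalJNIFrame jniFrame;
  JNIEnv* env = jniFrame.GetEnv();
  if (!env)
    return -1;                      // JVM missing or attach failed

  jclass devInfoClass = jniFrame.GetCmDevInfoClass();
  // ... GetMethodID / CallObjectMethod against devInfoClass ...
  return 0;                         // ~AutoLocalJNIFrame cleans up here
}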
@ -55,7 +55,7 @@ int32_t DeviceInfoImpl::NumberOfCapabilities(
if (_lastUsedDeviceNameLength == strlen((char*) deviceUniqueIdUTF8))
{
// Is it the same device that is asked for again.
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
if(strncasecmp((char*)_lastUsedDeviceName,
(char*) deviceUniqueIdUTF8,
_lastUsedDeviceNameLength)==0)
@ -92,7 +92,7 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
ReadLockScoped cs(_apiLock);

if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
|| (strncasecmp((char*)_lastUsedDeviceName,
(char*) deviceUniqueIdUTF8,
_lastUsedDeviceNameLength)!=0))
@ -156,7 +156,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(

ReadLockScoped cs(_apiLock);
if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
|| (strncasecmp((char*)_lastUsedDeviceName,
(char*) deviceUniqueIdUTF8,
_lastUsedDeviceNameLength)!=0))
@ -93,9 +93,10 @@ int32_t DeviceInfoLinux::GetDeviceName(
char device[20];
int fd = -1;
bool found = false;
for (int n = 0; n < 64; n++)
int device_index;
for (device_index = 0; device_index < 64; device_index++)
{
sprintf(device, "/dev/video%d", n);
sprintf(device, "/dev/video%d", device_index);
if ((fd = open(device, O_RDONLY)) != -1)
{
if (count == deviceNumber) {
@ -154,6 +155,15 @@ int32_t DeviceInfoLinux::GetDeviceName(
"buffer passed is too small");
return -1;
}
} else {
// if there's no bus info to use for uniqueId, invent one - and it has to be repeatable
if (snprintf(deviceUniqueIdUTF8, deviceUniqueIdUTF8Length, "fake_%u", device_index) >=
deviceUniqueIdUTF8Length)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"buffer passed is too small");
return -1;
}
}

return 0;
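The invented fake_%u id is deliberately repeatable: GetDeviceName() encodes the /dev/video index into the unique id, and the following hunks (CreateCapabilityMap and VideoCaptureModuleV4L2::Init) recover it with sscanf. A small round-trip sketch, assuming only the encoding shown here (helper names are illustrative):

#include <cstdio>

// Illustrative round trip for the fake uniqueId.
bool EncodeFakeId(char* buf, int len, unsigned index) {
  // Matches the snprintf above; false mirrors the "buffer too small" path.
  return std::snprintf(buf, len, "fake_%u", index) < len;
}

bool DecodeFakeId(const char* uniqueId, int* index) {
  // Matches the sscanf in the hunks below; one converted field on success.
  return std::sscanf(uniqueId, "fake_%d", index) == 1;
}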
|
||||||
@ -165,6 +175,7 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(
|
|||||||
int fd;
|
int fd;
|
||||||
char device[32];
|
char device[32];
|
||||||
bool found = false;
|
bool found = false;
|
||||||
|
int device_index;
|
||||||
|
|
||||||
const int32_t deviceUniqueIdUTF8Length =
|
const int32_t deviceUniqueIdUTF8Length =
|
||||||
(int32_t) strlen((char*) deviceUniqueIdUTF8);
|
(int32_t) strlen((char*) deviceUniqueIdUTF8);
|
||||||
@ -176,6 +187,15 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(
|
|||||||
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
|
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
|
||||||
"CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
|
"CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
|
||||||
|
|
||||||
|
/* detect /dev/video [0-63] entries */
|
||||||
|
if (sscanf(deviceUniqueIdUTF8,"fake_%d",&device_index) == 1)
|
||||||
|
{
|
||||||
|
sprintf(device, "/dev/video%d", device_index);
|
||||||
|
fd = open(device, O_RDONLY);
|
||||||
|
if (fd != -1) {
|
||||||
|
found = true;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
/* detect /dev/video [0-63] entries */
|
/* detect /dev/video [0-63] entries */
|
||||||
for (int n = 0; n < 64; ++n)
|
for (int n = 0; n < 64; ++n)
|
||||||
{
|
{
|
||||||
@@ -198,19 +218,11 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(
                     break; // fd matches with device unique id supplied
                 }
             }
-            else //match for device name
-            {
-                if (IsDeviceNameMatches((const char*) cap.card,
-                                        (const char*) deviceUniqueIdUTF8))
-                {
-                    found = true;
-                    break;
-                }
-            }
+            // else can't be a match as the test for fake_* above would have matched it
         }
         close(fd); // close since this is not the matching device
     }
+    }
     if (!found)
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
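The invented id only round-trips because GetDeviceName() and the probe code above agree on the format string. A minimal standalone sketch of that contract, with illustrative helper names (not functions from this tree):

#include <cstdio>

// Build a repeatable stand-in uniqueId for a device that has no bus info.
// Returns false if the buffer is too small, mirroring the snprintf check above.
static bool MakeFakeUniqueId(char* buf, size_t len, unsigned device_index) {
    int n = std::snprintf(buf, len, "fake_%u", device_index);
    return n >= 0 && static_cast<size_t>(n) < len;
}

// Recover the /dev/videoN index from a fake uniqueId; returns -1 if the
// id is not in the fake_ form.
static int ParseFakeUniqueId(const char* id) {
    int device_index;
    return std::sscanf(id, "fake_%d", &device_index) == 1 ? device_index : -1;
}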
@@ -10,7 +10,6 @@
 
 #include <errno.h>
 #include <fcntl.h>
-#include <linux/videodev2.h>
 #include <stdio.h>
 #include <string.h>
 #include <sys/ioctl.h>
@@ -18,7 +17,15 @@
 #include <sys/stat.h>
 #include <unistd.h>
 
-#include <iostream>
+//v4l includes
+#if defined(__DragonFly__) || defined(__NetBSD__) || defined(__OpenBSD__)
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
 
 #include <new>
 
 #include "webrtc/modules/video_capture/linux/video_capture_linux.h"
@@ -71,6 +78,13 @@ int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
         memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
     }
 
+    int device_index;
+    if (sscanf(deviceUniqueIdUTF8,"fake_%d", &device_index) == 1)
+    {
+        _deviceId = device_index;
+        return 0;
+    }
+
     int fd;
     char device[32];
     bool found = false;
@@ -15,6 +15,20 @@
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
+class nsAutoreleasePool {
+public:
+    nsAutoreleasePool()
+    {
+        mLocalPool = [[NSAutoreleasePool alloc] init];
+    }
+    ~nsAutoreleasePool()
+    {
+        [mLocalPool release];
+    }
+private:
+    NSAutoreleasePool *mLocalPool;
+};
+
 namespace webrtc
 {
 
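The wrapper makes pool management scope-based: a local nsAutoreleasePool at the top of an entry point is drained on every return path, which matters because these methods can be reached from threads that have no Cocoa autorelease pool in place. A minimal usage sketch (the function name is illustrative):

// Objective-C++ entry point that may be reached from a thread with no
// autorelease pool in place; the local pool covers every return path.
int32_t SomeCaptureEntryPoint()
{
    nsAutoreleasePool localPool;   // pool created here...
    // ... any autoreleased ObjC objects used in this scope ...
    return 0;
}                                  // ...and drained here, even on early return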
@@ -41,6 +55,7 @@ VideoCaptureMacQTKit::VideoCaptureMacQTKit(const int32_t id) :
 VideoCaptureMacQTKit::~VideoCaptureMacQTKit()
 {
 
+    nsAutoreleasePool localPool;
     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
                  "~VideoCaptureMacQTKit() called");
     if(_captureDevice)
@@ -71,6 +86,8 @@ int32_t VideoCaptureMacQTKit::Init(
     _deviceUniqueId = new char[nameLength+1];
     memcpy(_deviceUniqueId, iDeviceUniqueIdUTF8,nameLength+1);
 
+    nsAutoreleasePool localPool;
+
     _captureDevice = [[VideoCaptureMacQTKitObjC alloc] init];
     if(NULL == _captureDevice)
     {
@@ -164,6 +181,7 @@ int32_t VideoCaptureMacQTKit::StartCapture(
     const VideoCaptureCapability& capability)
 {
 
+    nsAutoreleasePool localPool;
     _captureWidth = capability.width;
     _captureHeight = capability.height;
     _captureFrameRate = capability.maxFPS;
@@ -180,6 +198,7 @@ int32_t VideoCaptureMacQTKit::StartCapture(
 
 int32_t VideoCaptureMacQTKit::StopCapture()
 {
+    nsAutoreleasePool localPool;
     [_captureDevice stopCapture];
     _isCapturing = false;
     return 0;
@@ -13,6 +13,20 @@
 #include "webrtc/modules/video_capture/video_capture_config.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
+class nsAutoreleasePool {
+public:
+    nsAutoreleasePool()
+    {
+        mLocalPool = [[NSAutoreleasePool alloc] init];
+    }
+    ~nsAutoreleasePool()
+    {
+        [mLocalPool release];
+    }
+private:
+    NSAutoreleasePool *mLocalPool;
+};
+
 namespace webrtc
 {
 namespace videocapturemodule
@@ -21,13 +35,14 @@ namespace videocapturemodule
 VideoCaptureMacQTKitInfo::VideoCaptureMacQTKitInfo(const int32_t id) :
     DeviceInfoImpl(id)
 {
+    nsAutoreleasePool localPool;
     _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc] init];
 }
 
 VideoCaptureMacQTKitInfo::~VideoCaptureMacQTKitInfo()
 {
+    nsAutoreleasePool localPool;
     [_captureInfo release];
 
 }
 
 int32_t VideoCaptureMacQTKitInfo::Init()
@@ -39,6 +54,7 @@ int32_t VideoCaptureMacQTKitInfo::Init()
 uint32_t VideoCaptureMacQTKitInfo::NumberOfDevices()
 {
 
+    nsAutoreleasePool localPool;
     uint32_t captureDeviceCount =
         [[_captureInfo getCaptureDeviceCount]intValue];
     return captureDeviceCount;
@@ -51,6 +67,7 @@ int32_t VideoCaptureMacQTKitInfo::GetDeviceName(
     uint32_t deviceUniqueIdUTF8Length, char* productUniqueIdUTF8,
     uint32_t productUniqueIdUTF8Length)
 {
+    nsAutoreleasePool localPool;
     int errNum = [[_captureInfo getDeviceNamesFromIndex:deviceNumber
                    DefaultName:deviceNameUTF8 WithLength:deviceNameLength
                    AndUniqueID:deviceUniqueIdUTF8
@@ -104,6 +121,7 @@ int32_t VideoCaptureMacQTKitInfo::DisplayCaptureSettingsDialogBox(
     uint32_t positionX, uint32_t positionY)
 {
 
+    nsAutoreleasePool localPool;
     return [[_captureInfo
              displayCaptureSettingsDialogBoxWithDevice:deviceUniqueIdUTF8
              AndTitle:dialogTitleUTF8
@@ -25,7 +25,6 @@
 @interface VideoCaptureMacQTKitInfoObjC : NSObject{
     bool _OSSupportedInfo;
     NSArray* _captureDevicesInfo;
-    NSAutoreleasePool* _poolInfo;
     int _captureDeviceCountInfo;
 
 }
@@ -93,8 +93,12 @@ using namespace webrtc;
         return [NSNumber numberWithInt:-1];
     }
 
-    QTCaptureDevice* tempCaptureDevice =
-        (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
+    if ([_captureDevicesInfo count] <= index)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    QTCaptureDevice* tempCaptureDevice = (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
     if(!tempCaptureDevice)
     {
         return [NSNumber numberWithInt:-1];
@@ -137,7 +141,6 @@ using namespace webrtc;
     return [NSNumber numberWithInt:0];
 }
 
-    _poolInfo = [[NSAutoreleasePool alloc]init];
     _captureDeviceCountInfo = 0;
     [self getCaptureDevices];
 
@@ -151,7 +151,17 @@ using namespace videocapturemodule;
     if (!_capturing)
         return;
 
-    [_captureSession stopRunning];
+    // This method is often called on a secondary thread. Which means
+    // that the following can sometimes run "too early", causing crashes
+    // and/or weird errors concerning initialization. On OS X 10.7 and
+    // 10.8, the CoreMediaIO method CMIOUninitializeGraph() is called from
+    // -[QTCaptureSession stopRunning]. If this is called too early,
+    // low-level session data gets uninitialized before low-level code
+    // is finished trying to use it. The solution is to make stopRunning
+    // always run on the main thread. See bug 837539.
+    [_captureSession performSelectorOnMainThread:@selector(stopRunning)
+                     withObject:nil
+                     waitUntilDone:NO];
     _capturing = NO;
 }
 
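The key property of the fix is that the call is deferred to the main thread rather than executed in place (waitUntilDone:NO, so the capture thread never blocks). The same hand-off can be sketched in portable C++ as a main-thread task queue; this illustrates the pattern only and is not an API from this tree:

#include <functional>
#include <mutex>
#include <queue>

// Stand-in for "run this on the main thread later": the capture thread
// enqueues the teardown, and the main-thread loop drains the queue.
class MainThreadQueue {
public:
    void Post(std::function<void()> task) {
        std::lock_guard<std::mutex> lock(mMutex);
        mTasks.push(std::move(task));
    }
    // Called from the main thread's event loop.
    void Drain() {
        std::queue<std::function<void()>> pending;
        {
            std::lock_guard<std::mutex> lock(mMutex);
            std::swap(pending, mTasks);
        }
        while (!pending.empty()) {
            pending.front()();   // e.g. the equivalent of [session stopRunning]
            pending.pop();
        }
    }
private:
    std::mutex mMutex;
    std::queue<std::function<void()>> mTasks;
};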
@@ -27,8 +27,8 @@
 #include <QuickTime/video_capture_quick_time.h>
 #include <QuickTime/video_capture_quick_time_info.h>
 #else
-#include <QTKit/video_capture_qtkit.h>
-#include <QTKit/video_capture_qtkit_info.h>
+#include "qtkit/video_capture_qtkit.h"
+#include "qtkit/video_capture_qtkit_info.h"
 #endif
 
 namespace webrtc
@@ -16,6 +16,11 @@
     '<(webrtc_root)/common_video/common_video.gyp:common_video',
     '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
   ],
 
+  'cflags_mozilla': [
+    '$(NSPR_CFLAGS)',
+  ],
+
   'include_dirs': [
     'include',
     '../interface',
@@ -41,7 +46,7 @@
   ],
 }, { # include_internal_video_capture == 1
   'conditions': [
-    ['OS=="linux"', {
+    ['include_v4l2_video_capture==1', {
       'include_dirs': [
         'linux',
       ],
@@ -77,9 +82,13 @@
       },
     }], # mac
     ['OS=="win"', {
+      'conditions': [
+        ['build_with_mozilla==0', {
           'dependencies': [
             '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses',
           ],
+        }],
+      ],
       'include_dirs': [
         'windows',
       ],
@@ -97,6 +106,10 @@
         'windows/video_capture_factory_windows.cc',
         'windows/video_capture_mf.cc',
         'windows/video_capture_mf.h',
+        'windows/BasePin.cpp',
+        'windows/BaseFilter.cpp',
+        'windows/BaseInputPin.cpp',
+        'windows/MediaType.cpp',
       ],
       'link_settings': {
         'libraries': [
@@ -162,7 +175,7 @@
         'test/video_capture_main_mac.mm',
       ],
       'conditions': [
-        ['OS=="mac" or OS=="linux"', {
+        ['OS!="win" and OS!="android"', {
           'cflags': [
             '-Wno-write-strings',
           ],
@@ -170,11 +183,15 @@
             '-lpthread -lm',
           ],
         }],
+        ['include_v4l2_video_capture==1', {
+          'libraries': [
+            '-lXext',
+            '-lX11',
+          ],
+        }],
         ['OS=="linux"', {
          'libraries': [
            '-lrt',
-           '-lXext',
-           '-lX11',
          ],
        }],
        ['OS=="mac"', {
@@ -17,7 +17,6 @@
 #include "webrtc/system_wrappers/interface/trace.h"
 
 #include <Dvdmedia.h>
-#include <Streams.h>
 
 namespace webrtc
 {
@@ -42,6 +41,23 @@ const DelayValues WindowsCaptureDelays[NoWindowsCaptureDelays] = {
   },
 };
 
+
+void _FreeMediaType(AM_MEDIA_TYPE& mt)
+{
+    if (mt.cbFormat != 0)
+    {
+        CoTaskMemFree((PVOID)mt.pbFormat);
+        mt.cbFormat = 0;
+        mt.pbFormat = NULL;
+    }
+    if (mt.pUnk != NULL)
+    {
+        // pUnk should not be used.
+        mt.pUnk->Release();
+        mt.pUnk = NULL;
+    }
+}
+
 // static
 DeviceInfoDS* DeviceInfoDS::Create(const int32_t id)
 {
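The helper reproduces what FreeMediaType() from the DirectShow base classes does, which is what lets these files drop <Streams.h>. One caveat when swapping it in for DeleteMediaType(): the SDK's DeleteMediaType() also frees the AM_MEDIA_TYPE struct itself, so a full replacement looks roughly like this (a sketch; the wrapper name is illustrative):

// Releasing an AM_MEDIA_TYPE that was allocated by a DirectShow call:
// _FreeMediaType() only releases the format block and pUnk; the struct
// itself must still be freed with CoTaskMemFree if it was heap-allocated.
void ReleaseMediaType(AM_MEDIA_TYPE* pmt)
{
    if (pmt != NULL)
    {
        _FreeMediaType(*pmt);   // free pbFormat / release pUnk
        CoTaskMemFree(pmt);     // free the AM_MEDIA_TYPE struct itself
    }
}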
@@ -565,7 +581,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
 
         if (hrVC == S_OK)
         {
-            LONGLONG *frameDurationList;
+            LONGLONG *frameDurationList = NULL;
             LONGLONG maxFPS;
             long listSize;
             SIZE size;
@@ -584,7 +600,9 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
 
             // On some odd cameras, you may get a 0 for duration.
             // GetMaxOfFrameArray returns the lowest duration (highest FPS)
-            if (hrVC == S_OK && listSize > 0 &&
+            // Initialize and check the returned list for null since
+            // some broken drivers don't modify it.
+            if (hrVC == S_OK && listSize > 0 && frameDurationList &&
                 0 != (maxFPS = GetMaxOfFrameArray(frameDurationList,
                                                   listSize)))
             {
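The guard generalizes to any COM-style out-parameter: initialize the pointer, then require both a success HRESULT and a non-null result before dereferencing, since a broken driver can return S_OK without ever writing the pointer. A minimal sketch (QueryFrameDurations is a hypothetical stand-in, not the actual interface call):

// Defensive handling of an out-parameter that a broken driver may ignore:
// initialize to NULL, then require both a success HRESULT and a non-null
// result before dereferencing.
LONGLONG* durations = NULL;
long count = 0;
HRESULT hr = QueryFrameDurations(&durations, &count);  // hypothetical call
if (SUCCEEDED(hr) && count > 0 && durations != NULL)
{
    // safe to read durations[0..count-1]
}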
@@ -679,7 +697,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
                          capability->width, capability->height,
                          capability->rawType, capability->maxFPS);
             }
-            DeleteMediaType(pmt);
+            _FreeMediaType(*pmt);
             pmt = NULL;
         }
         RELEASE_AND_CLEAR(streamConfig);
@@ -21,6 +21,9 @@
 DEFINE_GUID(CLSID_SINKFILTER, 0x88cdbbdc, 0xa73b, 0x4afa, 0xac, 0xbf, 0x15, 0xd5,
             0xe2, 0xce, 0x12, 0xc3);
 
+using namespace mozilla::media;
+using namespace mozilla;
+
 namespace webrtc
 {
 namespace videocapturemodule
@@ -37,10 +40,10 @@ typedef struct tagTHREADNAME_INFO
 CaptureInputPin::CaptureInputPin (int32_t moduleId,
                                   IN TCHAR * szName,
                                   IN CaptureSinkFilter* pFilter,
-                                  IN CCritSec * pLock,
+                                  IN CriticalSection * pLock,
                                   OUT HRESULT * pHr,
                                   IN LPCWSTR pszName)
-    : CBaseInputPin (szName, pFilter, pLock, pHr, pszName),
+    : BaseInputPin (szName, pFilter, pLock, pHr, pszName),
       _requestedCapability(),
       _resultingCapability()
 {
@@ -53,7 +56,7 @@ CaptureInputPin::~CaptureInputPin()
 }
 
 HRESULT
-CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
+CaptureInputPin::GetMediaType (IN int iPosition, OUT MediaType * pmt)
 {
     // reset the thread handle
     _threadHandle = NULL;
@@ -161,7 +164,7 @@ CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
 }
 
 HRESULT
-CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
+CaptureInputPin::CheckMediaType ( IN const MediaType * pMediaType)
 {
     // reset the thread handle
     _threadHandle = NULL;
@@ -319,8 +322,8 @@ CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
 {
     HRESULT hr = S_OK;
 
-    ASSERT (m_pFilter);
-    ASSERT (pIMediaSample);
+    assert (mFilter);
+    assert (pIMediaSample);
 
     // get the thread handle of the delivering thread inc its priority
     if( _threadHandle == NULL)
@@ -348,8 +351,8 @@ CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
 
     }
 
-    reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->LockReceive();
-    hr = CBaseInputPin::Receive (pIMediaSample);
+    reinterpret_cast <CaptureSinkFilter *>(mFilter)->LockReceive();
+    hr = BaseInputPin::Receive (pIMediaSample);
 
     if (SUCCEEDED (hr))
     {
@@ -358,17 +361,17 @@ CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
         unsigned char* pBuffer = NULL;
         if(S_OK != pIMediaSample->GetPointer(&pBuffer))
         {
-            reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+            reinterpret_cast <CaptureSinkFilter *>(mFilter)->UnlockReceive();
             return S_FALSE;
         }
 
         // NOTE: filter unlocked within Send call
-        reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->ProcessCapturedFrame(
+        reinterpret_cast <CaptureSinkFilter *> (mFilter)->ProcessCapturedFrame(
             pBuffer,length,_resultingCapability);
     }
     else
     {
-        reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+        reinterpret_cast <CaptureSinkFilter *>(mFilter)->UnlockReceive();
     }
 
     return hr;
@@ -389,13 +392,15 @@ CaptureSinkFilter::CaptureSinkFilter (IN TCHAR * tszName,
                                       OUT HRESULT * phr,
                                       VideoCaptureExternal& captureObserver,
                                       int32_t moduleId)
-    : CBaseFilter(tszName,punk,& m_crtFilter,CLSID_SINKFILTER),
+    : BaseFilter(tszName, CLSID_SINKFILTER),
+      m_crtFilter("CaptureSinkFilter::m_crtFilter"),
+      m_crtRecv("CaptureSinkFilter::m_crtRecv"),
       m_pInput(NULL),
       _captureObserver(captureObserver),
       _moduleId(moduleId)
 {
     (* phr) = S_OK;
-    m_pInput = new CaptureInputPin(moduleId,NAME ("VideoCaptureInputPin"),
+    m_pInput = new CaptureInputPin(moduleId, L"VideoCaptureInputPin",
                                    this,
                                    & m_crtFilter,
                                    phr, L"VideoCapture");
@@ -418,10 +423,10 @@ int CaptureSinkFilter::GetPinCount()
     return 1;
 }
 
-CBasePin *
+BasePin *
 CaptureSinkFilter::GetPin(IN int Index)
 {
-    CBasePin * pPin;
+    BasePin * pPin;
     LockFilter ();
     if (Index == 0)
     {
@@ -438,22 +443,22 @@ CaptureSinkFilter::GetPin(IN int Index)
 STDMETHODIMP CaptureSinkFilter::Pause()
 {
     LockFilter();
-    if (m_State == State_Stopped)
+    if (mState == State_Stopped)
     {
         // change the state, THEN activate the input pin
-        m_State = State_Paused;
+        mState = State_Paused;
         if (m_pInput && m_pInput->IsConnected())
         {
             m_pInput->Active();
         }
         if (m_pInput && !m_pInput->IsConnected())
         {
-            m_State = State_Running;
+            mState = State_Running;
         }
     }
-    else if (m_State == State_Running)
+    else if (mState == State_Running)
     {
-        m_State = State_Paused;
+        mState = State_Paused;
     }
     UnlockFilter();
     return S_OK;
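Because LockFilter()/UnlockFilter() are explicit, every path out of Pause()/Stop() must remember to unlock. A small RAII guard over the same Enter()/Leave() interface removes that hazard; a sketch under the assumption that the lock type exposes Enter()/Leave() as shown above:

// Scope guard for an Enter()/Leave()-style lock such as the
// mozilla::CriticalSection used by CaptureSinkFilter.
template <typename Lock>
class ScopedLock {
public:
    explicit ScopedLock(Lock& aLock) : mLock(aLock) { mLock.Enter(); }
    ~ScopedLock() { mLock.Leave(); }
private:
    ScopedLock(const ScopedLock&);            // non-copyable (pre-C++11 style)
    ScopedLock& operator=(const ScopedLock&);
    Lock& mLock;
};

// Usage: the lock is released on every path out of the block.
// {
//     ScopedLock<mozilla::CriticalSection> guard(m_crtFilter);
//     ... touch filter state ...
// }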
@@ -465,7 +470,7 @@ STDMETHODIMP CaptureSinkFilter::Stop()
     LockFilter();
 
     // set the state
-    m_State = State_Stopped;
+    mState = State_Stopped;
 
     // inactivate the pins
     if (m_pInput)
@@ -479,7 +484,7 @@ STDMETHODIMP CaptureSinkFilter::Stop()
 void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph)
 {
     LockFilter();
-    m_pGraph = graph;
+    mGraph = graph;
     UnlockFilter();
 }
 
@@ -488,7 +493,7 @@ void CaptureSinkFilter::ProcessCapturedFrame(unsigned char* pBuffer,
                                              const VideoCaptureCapability& frameInfo)
 {
     // we have the receiver lock
-    if (m_State == State_Running)
+    if (mState == State_Running)
     {
         _captureObserver.IncomingFrame(pBuffer, length, frameInfo);
 
@@ -11,9 +11,10 @@
 #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
 #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
 
-#include <Streams.h> // Include base DS filter header files
-
 #include "webrtc/modules/video_capture/include/video_capture_defines.h"
+#include "BaseInputPin.h"
+#include "BaseFilter.h"
+#include "MediaType.h"
 
 namespace webrtc
 {
@@ -26,7 +27,7 @@ class CaptureSinkFilter;
  * input pin for camera input
  *
  */
-class CaptureInputPin: public CBaseInputPin
+class CaptureInputPin: public mozilla::media::BaseInputPin
 {
 public:
     int32_t _moduleId;
@@ -38,18 +39,18 @@ public:
     CaptureInputPin(int32_t moduleId,
                     IN TCHAR* szName,
                     IN CaptureSinkFilter* pFilter,
-                    IN CCritSec * pLock,
+                    IN mozilla::CriticalSection * pLock,
                     OUT HRESULT * pHr,
                     IN LPCWSTR pszName);
     virtual ~CaptureInputPin();
 
-    HRESULT GetMediaType (IN int iPos, OUT CMediaType * pmt);
-    HRESULT CheckMediaType (IN const CMediaType * pmt);
+    HRESULT GetMediaType (IN int iPos, OUT mozilla::media::MediaType * pmt);
+    HRESULT CheckMediaType (IN const mozilla::media::MediaType * pmt);
     STDMETHODIMP Receive (IN IMediaSample *);
     HRESULT SetMatchingMediaType(const VideoCaptureCapability& capability);
 };
 
-class CaptureSinkFilter: public CBaseFilter
+class CaptureSinkFilter: public mozilla::media::BaseFilter
 {
 
 public:
@@ -66,34 +67,55 @@ public:
     void ProcessCapturedFrame(unsigned char* pBuffer, int32_t length,
                               const VideoCaptureCapability& frameInfo);
     // explicit receiver lock aquisition and release
-    void LockReceive() { m_crtRecv.Lock();}
-    void UnlockReceive() {m_crtRecv.Unlock();}
+    void LockReceive() { m_crtRecv.Enter();}
+    void UnlockReceive() {m_crtRecv.Leave();}
 
     // explicit filter lock aquisition and release
-    void LockFilter() {m_crtFilter.Lock();}
-    void UnlockFilter() { m_crtFilter.Unlock(); }
+    void LockFilter() {m_crtFilter.Enter();}
+    void UnlockFilter() { m_crtFilter.Leave(); }
     void SetFilterGraph(IGraphBuilder* graph); // Used if EVR
 
     // --------------------------------------------------------------------
     // COM interfaces
-    DECLARE_IUNKNOWN ;
+    STDMETHODIMP QueryInterface(REFIID aIId, void **aInterface)
+    {
+        return mozilla::media::BaseFilter::QueryInterface(aIId, aInterface);
+    }
+    STDMETHODIMP_(ULONG) AddRef()
+    {
+        return ::InterlockedIncrement(&mRefCnt);
+    }
+
+    STDMETHODIMP_(ULONG) Release()
+    {
+        unsigned long newRefCnt = ::InterlockedDecrement(&mRefCnt);
+
+        if (!newRefCnt) {
+            delete this;
+        }
+
+        return newRefCnt;
+    }
+
     STDMETHODIMP SetMatchingMediaType(const VideoCaptureCapability& capability);
 
     // --------------------------------------------------------------------
     // CBaseFilter methods
     int GetPinCount ();
-    CBasePin * GetPin ( IN int Index);
+    mozilla::media::BasePin * GetPin ( IN int Index);
     STDMETHODIMP Pause ();
     STDMETHODIMP Stop ();
     STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
     // --------------------------------------------------------------------
     // class factory calls this
-    static CUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
+    static IUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
 private:
-    CCritSec m_crtFilter; // filter lock
-    CCritSec m_crtRecv; // receiver lock; always acquire before filter lock
+    mozilla::CriticalSection m_crtFilter; // filter lock
+    mozilla::CriticalSection m_crtRecv; // receiver lock; always acquire before filter lock
     CaptureInputPin * m_pInput;
     VideoCaptureExternal& _captureObserver;
     int32_t _moduleId;
+    unsigned long mRefCnt;
 };
 } // namespace videocapturemodule
 } // namespace webrtc
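Replacing DECLARE_IUNKNOWN with hand-written AddRef()/Release() makes the filter's lifetime explicit: InterlockedIncrement/Decrement keep the count thread-safe and the object deletes itself when the count reaches zero. A standalone sketch of the same pattern (note the count must start in a known state; here 0, with the creator taking the first reference):

#include <windows.h>

// Minimal manual COM-style refcounting, mirroring the pattern above.
// Assumes the count starts at 0 and the creator calls AddRef() once.
class RefCounted {
public:
    RefCounted() : mRefCnt(0) {}

    ULONG AddRef() {
        return ::InterlockedIncrement(&mRefCnt);
    }
    ULONG Release() {
        ULONG newRefCnt = ::InterlockedDecrement(&mRefCnt);
        if (newRefCnt == 0) {
            delete this;        // last reference gone; self-destruct
        }
        return newRefCnt;
    }
protected:
    virtual ~RefCounted() {}    // force destruction via Release()
private:
    LONG mRefCnt;               // Interlocked* operates on LONG
};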
@@ -30,6 +30,12 @@
       'dependencies': [
         '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
       ],
+    },{
+      'link_settings': {
+        'libraries': [
+          '$(LIBVPX_OBJ)/libvpx.a',
+        ],
+      },
     }],
   ],
   'direct_dependent_settings': {
Some files were not shown because too many files have changed in this diff.