diff --git a/content/media/webrtc/MediaEngineWebRTC.cpp b/content/media/webrtc/MediaEngineWebRTC.cpp index b537f5856ae..a6e34d5e233 100644 --- a/content/media/webrtc/MediaEngineWebRTC.cpp +++ b/content/media/webrtc/MediaEngineWebRTC.cpp @@ -254,7 +254,7 @@ MediaEngineWebRTC::EnumerateAudioDevices(nsTArrayAttachCurrentThread(&env, nullptr); - if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) { + if (webrtc::VoiceEngine::SetAndroidObjects(jvm, env, (void*)context) != 0) { LOG(("VoiceEngine:SetAndroidObjects Failed")); return; } diff --git a/content/media/webrtc/MediaEngineWebRTC.h b/content/media/webrtc/MediaEngineWebRTC.h index 15d9dc451f5..2230b23879b 100644 --- a/content/media/webrtc/MediaEngineWebRTC.h +++ b/content/media/webrtc/MediaEngineWebRTC.h @@ -45,7 +45,6 @@ #include "webrtc/video_engine/include/vie_codec.h" #include "webrtc/video_engine/include/vie_render.h" #include "webrtc/video_engine/include/vie_capture.h" -#include "webrtc/video_engine/include/vie_file.h" #ifdef MOZ_B2G_CAMERA #include "CameraPreviewMediaStream.h" #include "DOMCameraManager.h" @@ -118,7 +117,14 @@ public: #else // ViEExternalRenderer. virtual int FrameSizeChange(unsigned int, unsigned int, unsigned int); - virtual int DeliverFrame(unsigned char*, int, uint32_t, int64_t); + virtual int DeliverFrame(unsigned char*,int, uint32_t , int64_t, + void *handle); + /** + * Does DeliverFrame() support a null buffer and non-null handle + * (video texture)? + * XXX Investigate! Especially for Android/B2G + */ + virtual bool IsTextureSupported() { return false; } MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex) : mVideoEngine(aVideoEnginePtr) diff --git a/content/media/webrtc/MediaEngineWebRTCVideo.cpp b/content/media/webrtc/MediaEngineWebRTCVideo.cpp index baa91db0ac6..2a34df7a275 100644 --- a/content/media/webrtc/MediaEngineWebRTCVideo.cpp +++ b/content/media/webrtc/MediaEngineWebRTCVideo.cpp @@ -40,7 +40,8 @@ MediaEngineWebRTCVideoSource::FrameSizeChange( // ViEExternalRenderer Callback. Process every incoming frame here. int MediaEngineWebRTCVideoSource::DeliverFrame( - unsigned char* buffer, int size, uint32_t time_stamp, int64_t render_time) + unsigned char* buffer, int size, uint32_t time_stamp, int64_t render_time, + void *handle) { // mInSnapshotMode can only be set before the camera is turned on and // the renderer is started, so this amounts to a 1-shot @@ -399,126 +400,7 @@ MediaEngineWebRTCVideoSource::Stop(SourceMediaStream *aSource, TrackID aID) nsresult MediaEngineWebRTCVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile) { - /** - * To get a Snapshot we do the following: - * - Set a condition variable (mInSnapshotMode) to true - * - Attach the external renderer and start the camera - * - Wait for the condition variable to change to false - * - * Starting the camera has the effect of invoking DeliverFrame() when - * the first frame arrives from the camera. We only need one frame for - * GetCaptureDeviceSnapshot to work, so we immediately set the condition - * variable to false and notify this method. - * - * This causes the current thread to continue (PR_CondWaitVar will return), - * at which point we can grab a snapshot, convert it to a file and - * return from this function after cleaning up the temporary stream object - * and caling Stop() on the media source. 
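// A minimal sketch (not part of the patch) of a consumer of the widened
// ViEExternalRenderer interface above: DeliverFrame() now carries an opaque
// |handle| for texture-backed frames, and IsTextureSupported() advertises
// whether a null |buffer| plus non-null |handle| is acceptable. The class
// and member names below are illustrative only.
class SketchRenderer : public webrtc::ExternalRenderer {
public:
  virtual int FrameSizeChange(unsigned int width, unsigned int height,
                              unsigned int number_of_streams) {
    return 0;  // accept any geometry
  }
  virtual int DeliverFrame(unsigned char* buffer, int size,
                           uint32_t time_stamp, int64_t render_time,
                           void* handle) {
    // With IsTextureSupported() returning false, as at every site in this
    // patch, callers pass a real buffer and a null handle.
    return (buffer != NULL && handle == NULL) ? 0 : -1;
  }
  virtual bool IsTextureSupported() { return false; }
};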
- */ -#ifdef MOZ_B2G_CAMERA - ReentrantMonitorAutoEnter sync(mCallbackMonitor); -#endif - *aFile = nullptr; - if (!mInitDone || mState != kAllocated) { - return NS_ERROR_FAILURE; - } -#ifdef MOZ_B2G_CAMERA - mLastCapture = nullptr; - - NS_DispatchToMainThread(WrapRunnable(this, - &MediaEngineWebRTCVideoSource::StartImpl, - mCapability)); - mCallbackMonitor.Wait(); - if (mState != kStarted) { - return NS_ERROR_FAILURE; - } - - NS_DispatchToMainThread(WrapRunnable(this, - &MediaEngineWebRTCVideoSource::SnapshotImpl)); - mCallbackMonitor.Wait(); - if (mLastCapture == nullptr) - return NS_ERROR_FAILURE; - - mState = kStopped; - NS_DispatchToMainThread(WrapRunnable(this, - &MediaEngineWebRTCVideoSource::StopImpl)); - - // The camera return nsDOMMemoryFile indeed, and the inheritance tree is: - // nsIDOMBlob <- nsIDOMFile <- nsDOMFileBase <- nsDOMFile <- nsDOMMemoryFile - *aFile = mLastCapture.get(); - return NS_OK; -#else - { - MonitorAutoLock lock(mMonitor); - mInSnapshotMode = true; - } - - // Start the rendering (equivalent to calling Start(), but without a track). - int error = 0; - if (!mInitDone || mState != kAllocated) { - return NS_ERROR_FAILURE; - } - error = mViERender->AddRenderer(mCaptureIndex, webrtc::kVideoI420, (webrtc::ExternalRenderer*)this); - if (error == -1) { - return NS_ERROR_FAILURE; - } - error = mViERender->StartRender(mCaptureIndex); - if (error == -1) { - return NS_ERROR_FAILURE; - } - - if (mViECapture->StartCapture(mCaptureIndex, mCapability) < 0) { - return NS_ERROR_FAILURE; - } - - // Wait for the condition variable, will be set in DeliverFrame. - // We use a while loop, because even if Wait() returns, it's not - // guaranteed that the condition variable changed. - // FIX: we need need a way to cancel this and to bail if it appears to not be working - // Perhaps a maximum time, though some cameras can take seconds to start. 10 seconds? - { - MonitorAutoLock lock(mMonitor); - while (mInSnapshotMode) { - lock.Wait(); - } - } - - // If we get here, DeliverFrame received at least one frame. - webrtc::ViEFile* vieFile = webrtc::ViEFile::GetInterface(mVideoEngine); - if (!vieFile) { - return NS_ERROR_FAILURE; - } - - // Create a temporary file on the main thread and put the snapshot in it. - // See Run() in MediaEngineWebRTCVideo.h (sets mSnapshotPath). - NS_DispatchToMainThread(this, NS_DISPATCH_SYNC); - - if (!mSnapshotPath) { - return NS_ERROR_FAILURE; - } - - NS_ConvertUTF16toUTF8 path(*mSnapshotPath); - if (vieFile->GetCaptureDeviceSnapshot(mCaptureIndex, path.get()) < 0) { - delete mSnapshotPath; - mSnapshotPath = nullptr; - return NS_ERROR_FAILURE; - } - - // Stop the camera. 
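// The removed wait above blocks indefinitely; its own FIX note asks for a
// way to bail out after a maximum time. A minimal sketch of that idea using
// the standard library (the original uses Mozilla's Monitor, and the 10 s
// bound is the value the note itself floats):
#include <chrono>
#include <condition_variable>
#include <mutex>
static std::mutex gSnapshotMutex;
static std::condition_variable gSnapshotCv;
static bool gInSnapshotMode = true;  // cleared by the first frame callback
static bool WaitForFirstFrame() {
  std::unique_lock<std::mutex> lock(gSnapshotMutex);
  // Returns false if no frame arrives within the bound, instead of hanging
  // forever on a camera that never starts.
  return gSnapshotCv.wait_for(lock, std::chrono::seconds(10),
                              [] { return !gInSnapshotMode; });
}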
- mViERender->StopRender(mCaptureIndex); - mViERender->RemoveRenderer(mCaptureIndex); - - nsCOMPtr file; - nsresult rv = NS_NewLocalFile(*mSnapshotPath, false, getter_AddRefs(file)); - - delete mSnapshotPath; - mSnapshotPath = nullptr; - - NS_ENSURE_SUCCESS(rv, rv); - - NS_ADDREF(*aFile = new nsDOMFileFile(file)); -#endif - return NS_OK; + return NS_ERROR_NOT_IMPLEMENTED; } /** diff --git a/layout/media/webrtc/Makefile.in b/layout/media/webrtc/Makefile.in index 5dc84c114bb..489462226be 100644 --- a/layout/media/webrtc/Makefile.in +++ b/layout/media/webrtc/Makefile.in @@ -9,10 +9,12 @@ SHARED_LIBRARY_LIBS = \ $(call EXPAND_LIBNAME_PATH,video_capture_module,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_video_capture_module) \ $(call EXPAND_LIBNAME_PATH,webrtc_utility,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_webrtc_utility) \ $(call EXPAND_LIBNAME_PATH,audio_coding_module,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_audio_coding_module) \ + $(call EXPAND_LIBNAME_PATH,acm2,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_acm2) \ $(call EXPAND_LIBNAME_PATH,CNG,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_CNG) \ $(call EXPAND_LIBNAME_PATH,G711,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_G711) \ $(call EXPAND_LIBNAME_PATH,PCM16B,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_PCM16B) \ $(call EXPAND_LIBNAME_PATH,NetEq,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_NetEq) \ + $(call EXPAND_LIBNAME_PATH,NetEq4,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_NetEq4) \ $(call EXPAND_LIBNAME_PATH,system_wrappers,$(DEPTH)/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_system_wrappers) \ $(call EXPAND_LIBNAME_PATH,webrtc_video_coding,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_webrtc_video_coding) \ $(call EXPAND_LIBNAME_PATH,video_coding_utility,$(DEPTH)/media/webrtc/trunk/webrtc/modules/video_coding/utility/video_coding_utility_video_coding_utility) \ @@ -26,6 +28,7 @@ SHARED_LIBRARY_LIBS = \ $(call EXPAND_LIBNAME_PATH,rtp_rtcp,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_rtp_rtcp) \ $(call EXPAND_LIBNAME_PATH,bitrate_controller,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_bitrate_controller) \ $(call EXPAND_LIBNAME_PATH,remote_bitrate_estimator,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_remote_bitrate_estimator) \ + $(call EXPAND_LIBNAME_PATH,rbe_components,$(DEPTH)/media/webrtc/trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components_rbe_components) \ $(call EXPAND_LIBNAME_PATH,paced_sender,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_paced_sender) \ $(call EXPAND_LIBNAME_PATH,video_processing,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_video_processing) \ $(call EXPAND_LIBNAME_PATH,audio_conference_mixer,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_audio_conference_mixer) \ diff --git a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp index 4323fdf073e..8689d0c696b 100644 --- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp +++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp @@ -153,8 +153,9 @@ MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other) // get the JVM JavaVM *jvm = jsjni_GetVM(); + JNIEnv* jenv = jsjni_GetJNIForThread(); - if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) { + if (webrtc::VoiceEngine::SetAndroidObjects(jvm, jenv, (void*)context) != 0) { CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__); 
return kMediaConduitSessionNotInited; } diff --git a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp index ff050158b5b..b57a892c5b6 100644 --- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp +++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp @@ -972,7 +972,8 @@ int WebrtcVideoConduit::DeliverFrame(unsigned char* buffer, int buffer_size, uint32_t time_stamp, - int64_t render_time) + int64_t render_time, + void *handle) { CSFLogDebug(logTag, "%s Buffer Size %d", __FUNCTION__, buffer_size); diff --git a/media/webrtc/signaling/src/media-conduit/VideoConduit.h b/media/webrtc/signaling/src/media-conduit/VideoConduit.h index f662e7b2e3f..2aa8e0d1f60 100755 --- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h +++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h @@ -16,7 +16,6 @@ #include "webrtc/video_engine/include/vie_codec.h" #include "webrtc/video_engine/include/vie_render.h" #include "webrtc/video_engine/include/vie_network.h" -#include "webrtc/video_engine/include/vie_file.h" #include "webrtc/video_engine/include/vie_rtp_rtcp.h" /** This file hosts several structures identifying different aspects @@ -148,7 +147,15 @@ public: */ virtual int FrameSizeChange(unsigned int, unsigned int, unsigned int); - virtual int DeliverFrame(unsigned char*,int, uint32_t , int64_t); + virtual int DeliverFrame(unsigned char*,int, uint32_t , int64_t, + void *handle); + + /** + * Does DeliverFrame() support a null buffer and non-null handle + * (video texture)? + * XXX Investigate! Especially for Android/B2G + */ + virtual bool IsTextureSupported() { return false; } unsigned short SendingWidth() { return mSendingWidth; diff --git a/media/webrtc/signaling/src/media/VcmSIPCCBinding.cpp b/media/webrtc/signaling/src/media/VcmSIPCCBinding.cpp index af1f9a0e7e6..bb587d32acf 100644 --- a/media/webrtc/signaling/src/media/VcmSIPCCBinding.cpp +++ b/media/webrtc/signaling/src/media/VcmSIPCCBinding.cpp @@ -588,7 +588,7 @@ static short vcmRxAllocICE_s(TemporaryRef ctx_in, VcmSIPCCBinding::connectCandidateSignal(stream); std::vector candidates = stream->GetCandidates(); - CSFLogDebug( logTag, "%s: Got %lu candidates", __FUNCTION__, candidates.size()); + CSFLogDebug( logTag, "%s: Got %lu candidates", __FUNCTION__, (unsigned long) candidates.size()); std::string default_addr; int default_port; diff --git a/media/webrtc/trunk/webrtc/build/arm_neon.gypi b/media/webrtc/trunk/webrtc/build/arm_neon.gypi index 30d040b11d9..47d16c2702f 100644 --- a/media/webrtc/trunk/webrtc/build/arm_neon.gypi +++ b/media/webrtc/trunk/webrtc/build/arm_neon.gypi @@ -23,8 +23,30 @@ 'cflags!': [ '-mfpu=vfpv3-d16', ], + 'cflags_mozilla!': [ + '-mfpu=vfpv3-d16', + ], 'cflags': [ '-mfpu=neon', '-flax-vector-conversions', ], + 'cflags_mozilla': [ + '-mfpu=neon', + '-flax-vector-conversions', + ], + 'asflags!': [ + '-mfpu=vfpv3-d16', + ], + 'asflags_mozilla!': [ + '-mfpu=vfpv3-d16', + ], + 'asflags': [ + '-mfpu=neon', + '-flax-vector-conversions', + ], + 'asflags_mozilla': [ + '-mfpu=neon', + '-flax-vector-conversions', + ], + } diff --git a/media/webrtc/trunk/webrtc/build/common.gypi b/media/webrtc/trunk/webrtc/build/common.gypi index b425a7e4d73..cd00026a331 100644 --- a/media/webrtc/trunk/webrtc/build/common.gypi +++ b/media/webrtc/trunk/webrtc/build/common.gypi @@ -45,7 +45,12 @@ 'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8', 'rbe_components_path%': '<(webrtc_root)/modules/remote_bitrate_estimator', + 'include_g711%': 1, + 
'include_g722%': 1, + 'include_ilbc%': 1, 'include_opus%': 1, + 'include_isac%': 1, + 'include_pcm16b%': 1, }, 'build_with_chromium%': '<(build_with_chromium)', 'build_with_libjingle%': '<(build_with_libjingle)', @@ -54,7 +59,14 @@ 'import_isolate_path%': '<(import_isolate_path)', 'modules_java_gyp_path%': '<(modules_java_gyp_path)', 'webrtc_vp8_dir%': '<(webrtc_vp8_dir)', + + 'include_g711%': '<(include_g711)', + 'include_g722%': '<(include_g722)', + 'include_ilbc%': '<(include_ilbc)', 'include_opus%': '<(include_opus)', + 'include_isac%': '<(include_isac)', + 'include_pcm16b%': '<(include_pcm16b)', + 'rbe_components_path%': '<(rbe_components_path)', # The Chromium common.gypi we use treats all gyp files without @@ -113,6 +125,9 @@ # Include ndk cpu features in Chromium build. 'include_ndk_cpu_features%': 1, + + # lazily allocate the ~4MB of trace message buffers if set + 'enable_lazy_trace_alloc%': 0, }, { # Settings for the standalone (not-in-Chromium) build. # TODO(andrew): For now, disable the Chrome plugins, which causes a # flood of chromium-style warnings. Investigate enabling them: @@ -136,6 +151,21 @@ # and Java Implementation 'enable_android_opensl%': 0, }], + ['OS=="linux"', { + 'include_alsa_audio%': 1, + }, { + 'include_alsa_audio%': 0, + }], + ['OS=="solaris" or os_bsd==1', { + 'include_pulse_audio%': 1, + }, { + 'include_pulse_audio%': 0, + }], + ['OS=="linux" or OS=="solaris" or os_bsd==1', { + 'include_v4l2_video_capture%': 1, + }, { + 'include_v4l2_video_capture%': 0, + }], ['OS=="ios"', { 'build_libjpeg%': 0, 'enable_protobuf%': 0, @@ -160,10 +190,15 @@ 'defines': [ # TODO(leozwang): Run this as a gclient hook rather than at build-time: # http://code.google.com/p/webrtc/issues/detail?id=687 - 'WEBRTC_SVNREVISION="Unavailable(issue687)"', + 'WEBRTC_SVNREVISION="\\\"Unavailable_issue687\\\""', #'WEBRTC_SVNREVISION=" #include "webrtc/common_audio/include/audio_util.h" -#include "webrtc/common_audio/resampler/include/resampler.h" #include "webrtc/common_audio/resampler/push_sinc_resampler.h" namespace webrtc { diff --git a/media/webrtc/trunk/webrtc/common_audio/resampler/resampler.cc b/media/webrtc/trunk/webrtc/common_audio/resampler/resampler.cc index 071e551d829..f76a8161ffb 100644 --- a/media/webrtc/trunk/webrtc/common_audio/resampler/resampler.cc +++ b/media/webrtc/trunk/webrtc/common_audio/resampler/resampler.cc @@ -15,1070 +15,121 @@ #include #include +#include #include "webrtc/common_audio/resampler/include/resampler.h" #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" +// TODO(jesup) better adjust per platform ability +// Note: if these are changed (higher), you may need to change the +// KernelDelay values in the unit tests here and in output_mixer. 
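// (General speex behavior, not specific to this patch: higher quality
// settings select longer filter kernels, which raises both CPU cost and
// algorithmic delay; the 16*3-sample worst-case kernel delay assumed by
// resampler_unittest.cc below is tied to these quality choices.)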
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK) +#define RESAMPLER_QUALITY 2 +#else +#define RESAMPLER_QUALITY 3 +#endif namespace webrtc { -Resampler::Resampler() +Resampler::Resampler() : state_(NULL), type_(kResamplerSynchronous) { - state1_ = NULL; - state2_ = NULL; - state3_ = NULL; - in_buffer_ = NULL; - out_buffer_ = NULL; - in_buffer_size_ = 0; - out_buffer_size_ = 0; - in_buffer_size_max_ = 0; - out_buffer_size_max_ = 0; - // we need a reset before we will work - my_in_frequency_khz_ = 0; - my_out_frequency_khz_ = 0; - my_mode_ = kResamplerMode1To1; - my_type_ = kResamplerInvalid; - slave_left_ = NULL; - slave_right_ = NULL; + // Note: Push will fail until Reset() is called } -Resampler::Resampler(int inFreq, int outFreq, ResamplerType type) +Resampler::Resampler(int in_freq, int out_freq, ResamplerType type) : + state_(NULL) // all others get initialized in reset { - state1_ = NULL; - state2_ = NULL; - state3_ = NULL; - in_buffer_ = NULL; - out_buffer_ = NULL; - in_buffer_size_ = 0; - out_buffer_size_ = 0; - in_buffer_size_max_ = 0; - out_buffer_size_max_ = 0; - // we need a reset before we will work - my_in_frequency_khz_ = 0; - my_out_frequency_khz_ = 0; - my_mode_ = kResamplerMode1To1; - my_type_ = kResamplerInvalid; - slave_left_ = NULL; - slave_right_ = NULL; - - Reset(inFreq, outFreq, type); + Reset(in_freq, out_freq, type); } Resampler::~Resampler() { - if (state1_) - { - free(state1_); - } - if (state2_) - { - free(state2_); - } - if (state3_) - { - free(state3_); - } - if (in_buffer_) - { - free(in_buffer_); - } - if (out_buffer_) - { - free(out_buffer_); - } - if (slave_left_) - { - delete slave_left_; - } - if (slave_right_) - { - delete slave_right_; - } + if (state_) + { + speex_resampler_destroy(state_); + } } -int Resampler::ResetIfNeeded(int inFreq, int outFreq, ResamplerType type) +int Resampler::ResetIfNeeded(int in_freq, int out_freq, ResamplerType type) { - int tmpInFreq_kHz = inFreq / 1000; - int tmpOutFreq_kHz = outFreq / 1000; - - if ((tmpInFreq_kHz != my_in_frequency_khz_) || (tmpOutFreq_kHz != my_out_frequency_khz_) - || (type != my_type_)) - { - return Reset(inFreq, outFreq, type); - } else - { - return 0; - } -} - -int Resampler::Reset(int inFreq, int outFreq, ResamplerType type) -{ - - if (state1_) - { - free(state1_); - state1_ = NULL; - } - if (state2_) - { - free(state2_); - state2_ = NULL; - } - if (state3_) - { - free(state3_); - state3_ = NULL; - } - if (in_buffer_) - { - free(in_buffer_); - in_buffer_ = NULL; - } - if (out_buffer_) - { - free(out_buffer_); - out_buffer_ = NULL; - } - if (slave_left_) - { - delete slave_left_; - slave_left_ = NULL; - } - if (slave_right_) - { - delete slave_right_; - slave_right_ = NULL; - } - - in_buffer_size_ = 0; - out_buffer_size_ = 0; - in_buffer_size_max_ = 0; - out_buffer_size_max_ = 0; - - // This might be overridden if parameters are not accepted. - my_type_ = type; - - // Start with a math exercise, Euclid's algorithm to find the gcd: - - int a = inFreq; - int b = outFreq; - int c = a % b; - while (c != 0) - { - a = b; - b = c; - c = a % b; - } - // b is now the gcd; - - // We need to track what domain we're in. - my_in_frequency_khz_ = inFreq / 1000; - my_out_frequency_khz_ = outFreq / 1000; - - // Scale with GCD - inFreq = inFreq / b; - outFreq = outFreq / b; - - // Do we need stereo? 
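// (In the removed encoding, the high nibble of ResamplerType carried the
// channel layout, 0x2n meaning stereo, and Push() rejected any type with a
// non-zero low nibble as asynchronous; the rewrite replaces this bit-packing
// with explicit type_, channels_, in_freq_ and out_freq_ members.)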
- if ((my_type_ & 0xf0) == 0x20) - { - // Change type to mono - type = static_cast( - ((static_cast(type) & 0x0f) + 0x10)); - slave_left_ = new Resampler(inFreq, outFreq, type); - slave_right_ = new Resampler(inFreq, outFreq, type); - } - - if (inFreq == outFreq) - { - my_mode_ = kResamplerMode1To1; - } else if (inFreq == 1) - { - switch (outFreq) - { - case 2: - my_mode_ = kResamplerMode1To2; - break; - case 3: - my_mode_ = kResamplerMode1To3; - break; - case 4: - my_mode_ = kResamplerMode1To4; - break; - case 6: - my_mode_ = kResamplerMode1To6; - break; - case 12: - my_mode_ = kResamplerMode1To12; - break; - default: - my_type_ = kResamplerInvalid; - return -1; - } - } else if (outFreq == 1) - { - switch (inFreq) - { - case 2: - my_mode_ = kResamplerMode2To1; - break; - case 3: - my_mode_ = kResamplerMode3To1; - break; - case 4: - my_mode_ = kResamplerMode4To1; - break; - case 6: - my_mode_ = kResamplerMode6To1; - break; - case 12: - my_mode_ = kResamplerMode12To1; - break; - default: - my_type_ = kResamplerInvalid; - return -1; - } - } else if ((inFreq == 2) && (outFreq == 3)) - { - my_mode_ = kResamplerMode2To3; - } else if ((inFreq == 2) && (outFreq == 11)) - { - my_mode_ = kResamplerMode2To11; - } else if ((inFreq == 4) && (outFreq == 11)) - { - my_mode_ = kResamplerMode4To11; - } else if ((inFreq == 8) && (outFreq == 11)) - { - my_mode_ = kResamplerMode8To11; - } else if ((inFreq == 3) && (outFreq == 2)) - { - my_mode_ = kResamplerMode3To2; - } else if ((inFreq == 11) && (outFreq == 2)) - { - my_mode_ = kResamplerMode11To2; - } else if ((inFreq == 11) && (outFreq == 4)) - { - my_mode_ = kResamplerMode11To4; - } else if ((inFreq == 11) && (outFreq == 16)) - { - my_mode_ = kResamplerMode11To16; - } else if ((inFreq == 11) && (outFreq == 32)) - { - my_mode_ = kResamplerMode11To32; - } else if ((inFreq == 11) && (outFreq == 8)) - { - my_mode_ = kResamplerMode11To8; - } else - { - my_type_ = kResamplerInvalid; - return -1; - } - - // Now create the states we need - switch (my_mode_) - { - case kResamplerMode1To1: - // No state needed; - break; - case kResamplerMode1To2: - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - break; - case kResamplerMode1To3: - state1_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz)); - WebRtcSpl_ResetResample16khzTo48khz((WebRtcSpl_State16khzTo48khz *)state1_); - break; - case kResamplerMode1To4: - // 1:2 - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - // 2:4 - state2_ = malloc(8 * sizeof(int32_t)); - memset(state2_, 0, 8 * sizeof(int32_t)); - break; - case kResamplerMode1To6: - // 1:2 - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - // 2:6 - state2_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz)); - WebRtcSpl_ResetResample16khzTo48khz((WebRtcSpl_State16khzTo48khz *)state2_); - break; - case kResamplerMode1To12: - // 1:2 - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - // 2:4 - state2_ = malloc(8 * sizeof(int32_t)); - memset(state2_, 0, 8 * sizeof(int32_t)); - // 4:12 - state3_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz)); - WebRtcSpl_ResetResample16khzTo48khz( - (WebRtcSpl_State16khzTo48khz*) state3_); - break; - case kResamplerMode2To3: - // 2:6 - state1_ = malloc(sizeof(WebRtcSpl_State16khzTo48khz)); - WebRtcSpl_ResetResample16khzTo48khz((WebRtcSpl_State16khzTo48khz *)state1_); - // 6:3 - state2_ = malloc(8 * sizeof(int32_t)); - memset(state2_, 0, 8 * sizeof(int32_t)); - break; - case kResamplerMode2To11: - state1_ 
= malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - - state2_ = malloc(sizeof(WebRtcSpl_State8khzTo22khz)); - WebRtcSpl_ResetResample8khzTo22khz((WebRtcSpl_State8khzTo22khz *)state2_); - break; - case kResamplerMode4To11: - state1_ = malloc(sizeof(WebRtcSpl_State8khzTo22khz)); - WebRtcSpl_ResetResample8khzTo22khz((WebRtcSpl_State8khzTo22khz *)state1_); - break; - case kResamplerMode8To11: - state1_ = malloc(sizeof(WebRtcSpl_State16khzTo22khz)); - WebRtcSpl_ResetResample16khzTo22khz((WebRtcSpl_State16khzTo22khz *)state1_); - break; - case kResamplerMode11To16: - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - - state2_ = malloc(sizeof(WebRtcSpl_State22khzTo16khz)); - WebRtcSpl_ResetResample22khzTo16khz((WebRtcSpl_State22khzTo16khz *)state2_); - break; - case kResamplerMode11To32: - // 11 -> 22 - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - - // 22 -> 16 - state2_ = malloc(sizeof(WebRtcSpl_State22khzTo16khz)); - WebRtcSpl_ResetResample22khzTo16khz((WebRtcSpl_State22khzTo16khz *)state2_); - - // 16 -> 32 - state3_ = malloc(8 * sizeof(int32_t)); - memset(state3_, 0, 8 * sizeof(int32_t)); - - break; - case kResamplerMode2To1: - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - break; - case kResamplerMode3To1: - state1_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz)); - WebRtcSpl_ResetResample48khzTo16khz((WebRtcSpl_State48khzTo16khz *)state1_); - break; - case kResamplerMode4To1: - // 4:2 - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - // 2:1 - state2_ = malloc(8 * sizeof(int32_t)); - memset(state2_, 0, 8 * sizeof(int32_t)); - break; - case kResamplerMode6To1: - // 6:2 - state1_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz)); - WebRtcSpl_ResetResample48khzTo16khz((WebRtcSpl_State48khzTo16khz *)state1_); - // 2:1 - state2_ = malloc(8 * sizeof(int32_t)); - memset(state2_, 0, 8 * sizeof(int32_t)); - break; - case kResamplerMode12To1: - // 12:4 - state1_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz)); - WebRtcSpl_ResetResample48khzTo16khz( - (WebRtcSpl_State48khzTo16khz*) state1_); - // 4:2 - state2_ = malloc(8 * sizeof(int32_t)); - memset(state2_, 0, 8 * sizeof(int32_t)); - // 2:1 - state3_ = malloc(8 * sizeof(int32_t)); - memset(state3_, 0, 8 * sizeof(int32_t)); - break; - case kResamplerMode3To2: - // 3:6 - state1_ = malloc(8 * sizeof(int32_t)); - memset(state1_, 0, 8 * sizeof(int32_t)); - // 6:2 - state2_ = malloc(sizeof(WebRtcSpl_State48khzTo16khz)); - WebRtcSpl_ResetResample48khzTo16khz((WebRtcSpl_State48khzTo16khz *)state2_); - break; - case kResamplerMode11To2: - state1_ = malloc(sizeof(WebRtcSpl_State22khzTo8khz)); - WebRtcSpl_ResetResample22khzTo8khz((WebRtcSpl_State22khzTo8khz *)state1_); - - state2_ = malloc(8 * sizeof(int32_t)); - memset(state2_, 0, 8 * sizeof(int32_t)); - - break; - case kResamplerMode11To4: - state1_ = malloc(sizeof(WebRtcSpl_State22khzTo8khz)); - WebRtcSpl_ResetResample22khzTo8khz((WebRtcSpl_State22khzTo8khz *)state1_); - break; - case kResamplerMode11To8: - state1_ = malloc(sizeof(WebRtcSpl_State22khzTo16khz)); - WebRtcSpl_ResetResample22khzTo16khz((WebRtcSpl_State22khzTo16khz *)state1_); - break; - - } - + if (!state_ || type != type_ || + in_freq != in_freq_ || out_freq != out_freq_) + { + // Note that fixed-rate resamplers where input == output rate will + // have state_ == NULL, and will call Reset() here - but reset won't + // do anything beyond overwrite the member vars unless it needs a + // 
real resampler. + return Reset(in_freq, out_freq, type); + } else { return 0; + } } -// Synchronous resampling, all output samples are written to samplesOut -int Resampler::Push(const int16_t * samplesIn, int lengthIn, int16_t* samplesOut, - int maxLen, int &outLen) +int Resampler::Reset(int in_freq, int out_freq, ResamplerType type) { - // Check that the resampler is not in asynchronous mode - if (my_type_ & 0x0f) + uint32_t channels = (type == kResamplerSynchronousStereo || + type == kResamplerFixedSynchronousStereo) ? 2 : 1; + + if (state_) + { + speex_resampler_destroy(state_); + state_ = NULL; + } + type_ = type; + channels_ = channels; + in_freq_ = in_freq; + out_freq_ = out_freq; + + // For fixed-rate, same-rate resamples we just memcpy and so don't spin up a resampler + if (in_freq != out_freq || !IsFixedRate()) + { + state_ = speex_resampler_init(channels, in_freq, out_freq, RESAMPLER_QUALITY, NULL); + if (!state_) { - return -1; + return -1; + } + } + return 0; +} + +// Synchronous resampling, all output samples are written to samples_out +// TODO(jesup) Change to take samples-per-channel in and out +int Resampler::Push(const int16_t* samples_in, int length_in, + int16_t* samples_out, int max_len, int &out_len) +{ + if (max_len < length_in) + { + return -1; + } + if (!state_) + { + if (!IsFixedRate() || in_freq_ != out_freq_) + { + // Since we initialize to a non-Fixed type, Push() will fail + // until Reset() is called + return -1; } - // Do we have a stereo signal? - if ((my_type_ & 0xf0) == 0x20) - { - - // Split up the signal and call the slave object for each channel - - int16_t* left = (int16_t*)malloc(lengthIn * sizeof(int16_t) / 2); - int16_t* right = (int16_t*)malloc(lengthIn * sizeof(int16_t) / 2); - int16_t* out_left = (int16_t*)malloc(maxLen / 2 * sizeof(int16_t)); - int16_t* out_right = - (int16_t*)malloc(maxLen / 2 * sizeof(int16_t)); - int res = 0; - for (int i = 0; i < lengthIn; i += 2) - { - left[i >> 1] = samplesIn[i]; - right[i >> 1] = samplesIn[i + 1]; - } - - // It's OK to overwrite the local parameter, since it's just a copy - lengthIn = lengthIn / 2; - - int actualOutLen_left = 0; - int actualOutLen_right = 0; - // Do resampling for right channel - res |= slave_left_->Push(left, lengthIn, out_left, maxLen / 2, actualOutLen_left); - res |= slave_right_->Push(right, lengthIn, out_right, maxLen / 2, actualOutLen_right); - if (res || (actualOutLen_left != actualOutLen_right)) - { - free(left); - free(right); - free(out_left); - free(out_right); - return -1; - } - - // Reassemble the signal - for (int i = 0; i < actualOutLen_left; i++) - { - samplesOut[i * 2] = out_left[i]; - samplesOut[i * 2 + 1] = out_right[i]; - } - outLen = 2 * actualOutLen_left; - - free(left); - free(right); - free(out_left); - free(out_right); - - return 0; - } - - // Containers for temp samples - int16_t* tmp; - int16_t* tmp_2; - // tmp data for resampling routines - int32_t* tmp_mem; - - switch (my_mode_) - { - case kResamplerMode1To1: - memcpy(samplesOut, samplesIn, lengthIn * sizeof(int16_t)); - outLen = lengthIn; - break; - case kResamplerMode1To2: - if (maxLen < (lengthIn * 2)) - { - return -1; - } - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, samplesOut, (int32_t*)state1_); - outLen = lengthIn * 2; - return 0; - case kResamplerMode1To3: - - // We can only handle blocks of 160 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 160) != 0) - { - return -1; - } - if (maxLen < (lengthIn * 3)) - { - return -1; - } - tmp_mem = (int32_t*)malloc(336 * 
sizeof(int32_t)); - - for (int i = 0; i < lengthIn; i += 160) - { - WebRtcSpl_Resample16khzTo48khz(samplesIn + i, samplesOut + i * 3, - (WebRtcSpl_State16khzTo48khz *)state1_, - tmp_mem); - } - outLen = lengthIn * 3; - free(tmp_mem); - return 0; - case kResamplerMode1To4: - if (maxLen < (lengthIn * 4)) - { - return -1; - } - - tmp = (int16_t*)malloc(sizeof(int16_t) * 2 * lengthIn); - // 1:2 - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (int32_t*)state1_); - // 2:4 - WebRtcSpl_UpsampleBy2(tmp, lengthIn * 2, samplesOut, (int32_t*)state2_); - outLen = lengthIn * 4; - free(tmp); - return 0; - case kResamplerMode1To6: - // We can only handle blocks of 80 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 80) != 0) - { - return -1; - } - if (maxLen < (lengthIn * 6)) - { - return -1; - } - - //1:2 - - tmp_mem = (int32_t*)malloc(336 * sizeof(int32_t)); - tmp = (int16_t*)malloc(sizeof(int16_t) * 2 * lengthIn); - - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (int32_t*)state1_); - outLen = lengthIn * 2; - - for (int i = 0; i < outLen; i += 160) - { - WebRtcSpl_Resample16khzTo48khz(tmp + i, samplesOut + i * 3, - (WebRtcSpl_State16khzTo48khz *)state2_, - tmp_mem); - } - outLen = outLen * 3; - free(tmp_mem); - free(tmp); - - return 0; - case kResamplerMode1To12: - // We can only handle blocks of 40 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 40) != 0) { - return -1; - } - if (maxLen < (lengthIn * 12)) { - return -1; - } - - tmp_mem = (int32_t*) malloc(336 * sizeof(int32_t)); - tmp = (int16_t*) malloc(sizeof(int16_t) * 4 * lengthIn); - //1:2 - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, samplesOut, - (int32_t*) state1_); - outLen = lengthIn * 2; - //2:4 - WebRtcSpl_UpsampleBy2(samplesOut, outLen, tmp, (int32_t*) state2_); - outLen = outLen * 2; - // 4:12 - for (int i = 0; i < outLen; i += 160) { - // WebRtcSpl_Resample16khzTo48khz() takes a block of 160 samples - // as input and outputs a resampled block of 480 samples. The - // data is now actually in 32 kHz sampling rate, despite the - // function name, and with a resampling factor of three becomes - // 96 kHz. 
- WebRtcSpl_Resample16khzTo48khz(tmp + i, samplesOut + i * 3, - (WebRtcSpl_State16khzTo48khz*) state3_, - tmp_mem); - } - outLen = outLen * 3; - free(tmp_mem); - free(tmp); - - return 0; - case kResamplerMode2To3: - if (maxLen < (lengthIn * 3 / 2)) - { - return -1; - } - // 2:6 - // We can only handle blocks of 160 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 160) != 0) - { - return -1; - } - tmp = static_cast (malloc(sizeof(int16_t) * lengthIn * 3)); - tmp_mem = (int32_t*)malloc(336 * sizeof(int32_t)); - for (int i = 0; i < lengthIn; i += 160) - { - WebRtcSpl_Resample16khzTo48khz(samplesIn + i, tmp + i * 3, - (WebRtcSpl_State16khzTo48khz *)state1_, - tmp_mem); - } - lengthIn = lengthIn * 3; - // 6:3 - WebRtcSpl_DownsampleBy2(tmp, lengthIn, samplesOut, (int32_t*)state2_); - outLen = lengthIn / 2; - free(tmp); - free(tmp_mem); - return 0; - case kResamplerMode2To11: - - // We can only handle blocks of 80 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 80) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 11) / 2)) - { - return -1; - } - tmp = (int16_t*)malloc(sizeof(int16_t) * 2 * lengthIn); - // 1:2 - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (int32_t*)state1_); - lengthIn *= 2; - - tmp_mem = (int32_t*)malloc(98 * sizeof(int32_t)); - - for (int i = 0; i < lengthIn; i += 80) - { - WebRtcSpl_Resample8khzTo22khz(tmp + i, samplesOut + (i * 11) / 4, - (WebRtcSpl_State8khzTo22khz *)state2_, - tmp_mem); - } - outLen = (lengthIn * 11) / 4; - free(tmp_mem); - free(tmp); - return 0; - case kResamplerMode4To11: - - // We can only handle blocks of 80 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 80) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 11) / 4)) - { - return -1; - } - tmp_mem = (int32_t*)malloc(98 * sizeof(int32_t)); - - for (int i = 0; i < lengthIn; i += 80) - { - WebRtcSpl_Resample8khzTo22khz(samplesIn + i, samplesOut + (i * 11) / 4, - (WebRtcSpl_State8khzTo22khz *)state1_, - tmp_mem); - } - outLen = (lengthIn * 11) / 4; - free(tmp_mem); - return 0; - case kResamplerMode8To11: - // We can only handle blocks of 160 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 160) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 11) / 8)) - { - return -1; - } - tmp_mem = (int32_t*)malloc(88 * sizeof(int32_t)); - - for (int i = 0; i < lengthIn; i += 160) - { - WebRtcSpl_Resample16khzTo22khz(samplesIn + i, samplesOut + (i * 11) / 8, - (WebRtcSpl_State16khzTo22khz *)state1_, - tmp_mem); - } - outLen = (lengthIn * 11) / 8; - free(tmp_mem); - return 0; - - case kResamplerMode11To16: - // We can only handle blocks of 110 samples - if ((lengthIn % 110) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 16) / 11)) - { - return -1; - } - - tmp_mem = (int32_t*)malloc(104 * sizeof(int32_t)); - tmp = (int16_t*)malloc((sizeof(int16_t) * lengthIn * 2)); - - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (int32_t*)state1_); - - for (int i = 0; i < (lengthIn * 2); i += 220) - { - WebRtcSpl_Resample22khzTo16khz(tmp + i, samplesOut + (i / 220) * 160, - (WebRtcSpl_State22khzTo16khz *)state2_, - tmp_mem); - } - - outLen = (lengthIn * 16) / 11; - - free(tmp_mem); - free(tmp); - return 0; - - case kResamplerMode11To32: - - // We can only handle blocks of 110 samples - if ((lengthIn % 110) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 32) / 11)) - { - return -1; - } - - tmp_mem = (int32_t*)malloc(104 * sizeof(int32_t)); - tmp = (int16_t*)malloc((sizeof(int16_t) * lengthIn * 
2)); - - // 11 -> 22 kHz in samplesOut - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, samplesOut, (int32_t*)state1_); - - // 22 -> 16 in tmp - for (int i = 0; i < (lengthIn * 2); i += 220) - { - WebRtcSpl_Resample22khzTo16khz(samplesOut + i, tmp + (i / 220) * 160, - (WebRtcSpl_State22khzTo16khz *)state2_, - tmp_mem); - } - - // 16 -> 32 in samplesOut - WebRtcSpl_UpsampleBy2(tmp, (lengthIn * 16) / 11, samplesOut, - (int32_t*)state3_); - - outLen = (lengthIn * 32) / 11; - - free(tmp_mem); - free(tmp); - return 0; - - case kResamplerMode2To1: - if (maxLen < (lengthIn / 2)) - { - return -1; - } - WebRtcSpl_DownsampleBy2(samplesIn, lengthIn, samplesOut, (int32_t*)state1_); - outLen = lengthIn / 2; - return 0; - case kResamplerMode3To1: - // We can only handle blocks of 480 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 480) != 0) - { - return -1; - } - if (maxLen < (lengthIn / 3)) - { - return -1; - } - tmp_mem = (int32_t*)malloc(496 * sizeof(int32_t)); - - for (int i = 0; i < lengthIn; i += 480) - { - WebRtcSpl_Resample48khzTo16khz(samplesIn + i, samplesOut + i / 3, - (WebRtcSpl_State48khzTo16khz *)state1_, - tmp_mem); - } - outLen = lengthIn / 3; - free(tmp_mem); - return 0; - case kResamplerMode4To1: - if (maxLen < (lengthIn / 4)) - { - return -1; - } - tmp = (int16_t*)malloc(sizeof(int16_t) * lengthIn / 2); - // 4:2 - WebRtcSpl_DownsampleBy2(samplesIn, lengthIn, tmp, (int32_t*)state1_); - // 2:1 - WebRtcSpl_DownsampleBy2(tmp, lengthIn / 2, samplesOut, (int32_t*)state2_); - outLen = lengthIn / 4; - free(tmp); - return 0; - - case kResamplerMode6To1: - // We can only handle blocks of 480 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 480) != 0) - { - return -1; - } - if (maxLen < (lengthIn / 6)) - { - return -1; - } - - tmp_mem = (int32_t*)malloc(496 * sizeof(int32_t)); - tmp = (int16_t*)malloc((sizeof(int16_t) * lengthIn) / 3); - - for (int i = 0; i < lengthIn; i += 480) - { - WebRtcSpl_Resample48khzTo16khz(samplesIn + i, tmp + i / 3, - (WebRtcSpl_State48khzTo16khz *)state1_, - tmp_mem); - } - outLen = lengthIn / 3; - free(tmp_mem); - WebRtcSpl_DownsampleBy2(tmp, outLen, samplesOut, (int32_t*)state2_); - free(tmp); - outLen = outLen / 2; - return 0; - case kResamplerMode12To1: - // We can only handle blocks of 480 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 480) != 0) { - return -1; - } - if (maxLen < (lengthIn / 12)) { - return -1; - } - - tmp_mem = (int32_t*) malloc(496 * sizeof(int32_t)); - tmp = (int16_t*) malloc((sizeof(int16_t) * lengthIn) / 3); - tmp_2 = (int16_t*) malloc((sizeof(int16_t) * lengthIn) / 6); - // 12:4 - for (int i = 0; i < lengthIn; i += 480) { - // WebRtcSpl_Resample48khzTo16khz() takes a block of 480 samples - // as input and outputs a resampled block of 160 samples. The - // data is now actually in 96 kHz sampling rate, despite the - // function name, and with a resampling factor of 1/3 becomes - // 32 kHz. 
- WebRtcSpl_Resample48khzTo16khz(samplesIn + i, tmp + i / 3, - (WebRtcSpl_State48khzTo16khz*) state1_, - tmp_mem); - } - outLen = lengthIn / 3; - free(tmp_mem); - // 4:2 - WebRtcSpl_DownsampleBy2(tmp, outLen, tmp_2, - (int32_t*) state2_); - outLen = outLen / 2; - free(tmp); - // 2:1 - WebRtcSpl_DownsampleBy2(tmp_2, outLen, samplesOut, - (int32_t*) state3_); - free(tmp_2); - outLen = outLen / 2; - return 0; - case kResamplerMode3To2: - if (maxLen < (lengthIn * 2 / 3)) - { - return -1; - } - // 3:6 - tmp = static_cast (malloc(sizeof(int16_t) * lengthIn * 2)); - WebRtcSpl_UpsampleBy2(samplesIn, lengthIn, tmp, (int32_t*)state1_); - lengthIn *= 2; - // 6:2 - // We can only handle blocks of 480 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 480) != 0) - { - free(tmp); - return -1; - } - tmp_mem = (int32_t*)malloc(496 * sizeof(int32_t)); - for (int i = 0; i < lengthIn; i += 480) - { - WebRtcSpl_Resample48khzTo16khz(tmp + i, samplesOut + i / 3, - (WebRtcSpl_State48khzTo16khz *)state2_, - tmp_mem); - } - outLen = lengthIn / 3; - free(tmp); - free(tmp_mem); - return 0; - case kResamplerMode11To2: - // We can only handle blocks of 220 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 220) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 2) / 11)) - { - return -1; - } - tmp_mem = (int32_t*)malloc(126 * sizeof(int32_t)); - tmp = (int16_t*)malloc((lengthIn * 4) / 11 * sizeof(int16_t)); - - for (int i = 0; i < lengthIn; i += 220) - { - WebRtcSpl_Resample22khzTo8khz(samplesIn + i, tmp + (i * 4) / 11, - (WebRtcSpl_State22khzTo8khz *)state1_, - tmp_mem); - } - lengthIn = (lengthIn * 4) / 11; - - WebRtcSpl_DownsampleBy2(tmp, lengthIn, samplesOut, (int32_t*)state2_); - outLen = lengthIn / 2; - - free(tmp_mem); - free(tmp); - return 0; - case kResamplerMode11To4: - // We can only handle blocks of 220 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 220) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 4) / 11)) - { - return -1; - } - tmp_mem = (int32_t*)malloc(126 * sizeof(int32_t)); - - for (int i = 0; i < lengthIn; i += 220) - { - WebRtcSpl_Resample22khzTo8khz(samplesIn + i, samplesOut + (i * 4) / 11, - (WebRtcSpl_State22khzTo8khz *)state1_, - tmp_mem); - } - outLen = (lengthIn * 4) / 11; - free(tmp_mem); - return 0; - case kResamplerMode11To8: - // We can only handle blocks of 160 samples - // Can be fixed, but I don't think it's needed - if ((lengthIn % 220) != 0) - { - return -1; - } - if (maxLen < ((lengthIn * 8) / 11)) - { - return -1; - } - tmp_mem = (int32_t*)malloc(104 * sizeof(int32_t)); - - for (int i = 0; i < lengthIn; i += 220) - { - WebRtcSpl_Resample22khzTo16khz(samplesIn + i, samplesOut + (i * 8) / 11, - (WebRtcSpl_State22khzTo16khz *)state1_, - tmp_mem); - } - outLen = (lengthIn * 8) / 11; - free(tmp_mem); - return 0; - break; - - } + // Fixed-rate, same-freq "resample" - use memcpy, which avoids + // filtering and delay. For non-fixed rates, where we might tweak + // from 48000->48000 to 48000->48001 for drift, we need to resample + // (and filter) all the time to avoid glitches on rate changes. 
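// Concretely, for the two cases the comment above distinguishes: a
// fixed-rate 48000 -> 48000 instance never allocates speex state, so its
// Push() reduces to the memcpy below with no added delay, while a plain
// kResamplerSynchronous 48000 -> 48000 instance keeps speex running so the
// rate can later be nudged for drift without a waveform discontinuity.
// The fast path: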
+ memcpy(samples_out, samples_in, length_in*sizeof(*samples_in)); + out_len = length_in; return 0; -} - -// Asynchronous resampling, input -int Resampler::Insert(int16_t * samplesIn, int lengthIn) -{ - if (my_type_ != kResamplerAsynchronous) - { - return -1; - } - int sizeNeeded, tenMsblock; - - // Determine need for size of outBuffer - sizeNeeded = out_buffer_size_ + ((lengthIn + in_buffer_size_) * my_out_frequency_khz_) - / my_in_frequency_khz_; - if (sizeNeeded > out_buffer_size_max_) - { - // Round the value upwards to complete 10 ms blocks - tenMsblock = my_out_frequency_khz_ * 10; - sizeNeeded = (sizeNeeded / tenMsblock + 1) * tenMsblock; - out_buffer_ = (int16_t*)realloc(out_buffer_, sizeNeeded * sizeof(int16_t)); - out_buffer_size_max_ = sizeNeeded; - } - - // If we need to use inBuffer, make sure all input data fits there. - - tenMsblock = my_in_frequency_khz_ * 10; - if (in_buffer_size_ || (lengthIn % tenMsblock)) - { - // Check if input buffer size is enough - if ((in_buffer_size_ + lengthIn) > in_buffer_size_max_) - { - // Round the value upwards to complete 10 ms blocks - sizeNeeded = ((in_buffer_size_ + lengthIn) / tenMsblock + 1) * tenMsblock; - in_buffer_ = (int16_t*)realloc(in_buffer_, - sizeNeeded * sizeof(int16_t)); - in_buffer_size_max_ = sizeNeeded; - } - // Copy in data to input buffer - memcpy(in_buffer_ + in_buffer_size_, samplesIn, lengthIn * sizeof(int16_t)); - - // Resample all available 10 ms blocks - int lenOut; - int dataLenToResample = (in_buffer_size_ / tenMsblock) * tenMsblock; - Push(in_buffer_, dataLenToResample, out_buffer_ + out_buffer_size_, - out_buffer_size_max_ - out_buffer_size_, lenOut); - out_buffer_size_ += lenOut; - - // Save the rest - memmove(in_buffer_, in_buffer_ + dataLenToResample, - (in_buffer_size_ - dataLenToResample) * sizeof(int16_t)); - in_buffer_size_ -= dataLenToResample; - } else - { - // Just resample - int lenOut; - Push(in_buffer_, lengthIn, out_buffer_ + out_buffer_size_, - out_buffer_size_max_ - out_buffer_size_, lenOut); - out_buffer_size_ += lenOut; - } - - return 0; -} - -// Asynchronous resampling output, remaining samples are buffered -int Resampler::Pull(int16_t* samplesOut, int desiredLen, int &outLen) -{ - if (my_type_ != kResamplerAsynchronous) - { - return -1; - } - - // Check that we have enough data - if (desiredLen <= out_buffer_size_) - { - // Give out the date - memcpy(samplesOut, out_buffer_, desiredLen * sizeof(int32_t)); - - // Shuffle down remaining - memmove(out_buffer_, out_buffer_ + desiredLen, - (out_buffer_size_ - desiredLen) * sizeof(int16_t)); - - // Update remaining size - out_buffer_size_ -= desiredLen; - - return 0; - } else - { - return -1; - } + } + assert(channels_ == 1 || channels_ == 2); + spx_uint32_t len = length_in = (length_in >> (channels_ - 1)); + spx_uint32_t out = (spx_uint32_t) (max_len >> (channels_ - 1)); + if ((speex_resampler_process_interleaved_int(state_, samples_in, &len, + samples_out, &out) != RESAMPLER_ERR_SUCCESS) || + len != (spx_uint32_t) length_in) + { + return -1; + } + out_len = (int) (channels_ * out); + return 0; } } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/common_audio/resampler/resampler_unittest.cc b/media/webrtc/trunk/webrtc/common_audio/resampler/resampler_unittest.cc index 3d1091d4149..16b79a21810 100644 --- a/media/webrtc/trunk/webrtc/common_audio/resampler/resampler_unittest.cc +++ b/media/webrtc/trunk/webrtc/common_audio/resampler/resampler_unittest.cc @@ -8,6 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include + #include "testing/gtest/include/gtest/gtest.h" #include "webrtc/common_audio/resampler/include/resampler.h" @@ -18,10 +20,7 @@ namespace webrtc { namespace { const ResamplerType kTypes[] = { kResamplerSynchronous, - kResamplerAsynchronous, kResamplerSynchronousStereo, - kResamplerAsynchronousStereo - // kResamplerInvalid excluded }; const size_t kTypesSize = sizeof(kTypes) / sizeof(*kTypes); @@ -31,7 +30,7 @@ const int kRates[] = { 8000, 16000, 32000, - 44000, + 44100, 48000, kMaxRate }; @@ -39,26 +38,19 @@ const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates); const int kMaxChannels = 2; const size_t kDataSize = static_cast (kMaxChannels * kMaxRate / 100); -// TODO(andrew): should we be supporting these combinations? -bool ValidRates(int in_rate, int out_rate) { - // Not the most compact notation, for clarity. - if ((in_rate == 44000 && (out_rate == 48000 || out_rate == 96000)) || - (out_rate == 44000 && (in_rate == 48000 || in_rate == 96000))) { - return false; - } - - return true; -} - class ResamplerTest : public testing::Test { protected: ResamplerTest(); virtual void SetUp(); virtual void TearDown(); + void RunResampleTest(int channels, + int src_sample_rate_hz, + int dst_sample_rate_hz); Resampler rs_; int16_t data_in_[kDataSize]; int16_t data_out_[kDataSize]; + int16_t data_reference_[kDataSize]; }; ResamplerTest::ResamplerTest() {} @@ -83,34 +75,119 @@ TEST_F(ResamplerTest, Reset) { ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j] << ", type: " << kTypes[k]; SCOPED_TRACE(ss.str()); - if (ValidRates(kRates[i], kRates[j])) - EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k])); - else - EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kTypes[k])); + EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k])); } } } } -// TODO(tlegrand): Replace code inside the two tests below with a function -// with number of channels and ResamplerType as input. -TEST_F(ResamplerTest, Synchronous) { - for (size_t i = 0; i < kRatesSize; ++i) { - for (size_t j = 0; j < kRatesSize; ++j) { - std::ostringstream ss; - ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]; - SCOPED_TRACE(ss.str()); +// Sets the signal value to increase by |data| with every sample. Floats are +// used so non-integer values result in rounding error, but not an accumulating +// error. +void SetMonoFrame(int16_t* buffer, float data, int sample_rate_hz) { + for (int i = 0; i < sample_rate_hz / 100; i++) { + buffer[i] = data * i; + } +} - if (ValidRates(kRates[i], kRates[j])) { - int in_length = kRates[i] / 100; - int out_length = 0; - EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous)); - EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize, - out_length)); - EXPECT_EQ(kRates[j] / 100, out_length); - } else { - EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous)); - } +// Sets the signal value to increase by |left| and |right| with every sample in +// each channel respectively. +void SetStereoFrame(int16_t* buffer, float left, float right, + int sample_rate_hz) { + for (int i = 0; i < sample_rate_hz / 100; i++) { + buffer[i * 2] = left * i; + buffer[i * 2 + 1] = right * i; + } +} + +// Computes the best SNR based on the error between |ref_frame| and +// |test_frame|. It allows for a sample delay between the signals to +// compensate for the resampling delay. 
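// Equivalently, SNR(d) = 10 * log10(sum_i ref[i]^2 / sum_i (ref[i] - test[i+d])^2),
// reported as the maximum over d in [0, max_delay), with 100 dB standing in
// for the zero-error case.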
+float ComputeSNR(const int16_t* reference, const int16_t* test, + int sample_rate_hz, int channels, int max_delay) { + float best_snr = 0; + int best_delay = 0; + int samples_per_channel = sample_rate_hz/100; + for (int delay = 0; delay < max_delay; delay++) { + float mse = 0; + float variance = 0; + for (int i = 0; i < samples_per_channel * channels - delay; i++) { + int error = reference[i] - test[i + delay]; + mse += error * error; + variance += reference[i] * reference[i]; + } + float snr = 100; // We assign 100 dB to the zero-error case. + if (mse > 0) + snr = 10 * log10(variance / mse); + if (snr > best_snr) { + best_snr = snr; + best_delay = delay; + } + } + printf("SNR=%.1f dB at delay=%d\n", best_snr, best_delay); + return best_snr; +} + +void ResamplerTest::RunResampleTest(int channels, + int src_sample_rate_hz, + int dst_sample_rate_hz) { + Resampler resampler; // Create a new one with every test. + const int16_t kSrcLeft = 60; // Shouldn't overflow for any used sample rate. + const int16_t kSrcRight = 30; + const float kResamplingFactor = (1.0 * src_sample_rate_hz) / + dst_sample_rate_hz; + const float kDstLeft = kResamplingFactor * kSrcLeft; + const float kDstRight = kResamplingFactor * kSrcRight; + if (channels == 1) + SetMonoFrame(data_in_, kSrcLeft, src_sample_rate_hz); + else + SetStereoFrame(data_in_, kSrcLeft, kSrcRight, src_sample_rate_hz); + + if (channels == 1) { + SetMonoFrame(data_out_, 0, dst_sample_rate_hz); + SetMonoFrame(data_reference_, kDstLeft, dst_sample_rate_hz); + } else { + SetStereoFrame(data_out_, 0, 0, dst_sample_rate_hz); + SetStereoFrame(data_reference_, kDstLeft, kDstRight, dst_sample_rate_hz); + } + + // The speex resampler has a known delay dependent on quality and rates, + // which we approximate here. Multiplying by two gives us a crude maximum + // for any resampling, as the old resampler typically (but not always) + // has lower delay. The actual delay is calculated internally based on the + // filter length in the QualityMap. + static const int kInputKernelDelaySamples = 16*3; + const int max_delay = std::min(1.0f, 1/kResamplingFactor) * + kInputKernelDelaySamples * channels * 2; + printf("(%d, %d Hz) -> (%d, %d Hz) ", // SNR reported on the same line later. + channels, src_sample_rate_hz, channels, dst_sample_rate_hz); + + int in_length = channels * src_sample_rate_hz / 100; + int out_length = 0; + EXPECT_EQ(0, rs_.Reset(src_sample_rate_hz, dst_sample_rate_hz, + (channels == 1 ? + kResamplerSynchronous : + kResamplerSynchronousStereo))); + EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize, + out_length)); + EXPECT_EQ(channels * dst_sample_rate_hz / 100, out_length); + + // EXPECT_EQ(0, Resample(src_frame_, &resampler, &dst_frame_)); + EXPECT_GT(ComputeSNR(data_reference_, data_out_, dst_sample_rate_hz, + channels, max_delay), 40.0f); +} + +TEST_F(ResamplerTest, Synchronous) { + // Number of channels is 1, mono mode. + const int kChannels = 1; + // We don't attempt to be exhaustive here, but just get good coverage. Some + // combinations of rates will not be resampled, and some give an odd + // resampling factor which makes it more difficult to evaluate. 
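// Of these, 44100 <-> 48000 is the interesting pair: a 147:160 ratio that
// the old mode table had no entry for (its removed ValidRates() check
// rejected the 44000 variant paired with 48000 outright).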
+ const int kSampleRates[] = {16000, 32000, 44100, 48000}; + const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates); + for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) { + for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) { + RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]); } } } @@ -118,24 +195,14 @@ TEST_F(ResamplerTest, Synchronous) { TEST_F(ResamplerTest, SynchronousStereo) { // Number of channels is 2, stereo mode. const int kChannels = 2; - for (size_t i = 0; i < kRatesSize; ++i) { - for (size_t j = 0; j < kRatesSize; ++j) { - std::ostringstream ss; - ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]; - SCOPED_TRACE(ss.str()); - - if (ValidRates(kRates[i], kRates[j])) { - int in_length = kChannels * kRates[i] / 100; - int out_length = 0; - EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], - kResamplerSynchronousStereo)); - EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize, - out_length)); - EXPECT_EQ(kChannels * kRates[j] / 100, out_length); - } else { - EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], - kResamplerSynchronousStereo)); - } + // We don't attempt to be exhaustive here, but just get good coverage. Some + // combinations of rates will not be resampled, and some give an odd + // resampling factor which makes it more difficult to evaluate. + const int kSampleRates[] = {16000, 32000, 44100, 48000}; + const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates); + for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) { + for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) { + RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]); } } } diff --git a/media/webrtc/trunk/webrtc/common_audio/resampler/sinc_resampler_neon.cc b/media/webrtc/trunk/webrtc/common_audio/resampler/sinc_resampler_neon.cc index e909a6c5dec..bc5328924aa 100644 --- a/media/webrtc/trunk/webrtc/common_audio/resampler/sinc_resampler_neon.cc +++ b/media/webrtc/trunk/webrtc/common_audio/resampler/sinc_resampler_neon.cc @@ -26,11 +26,11 @@ float SincResampler::Convolve_NEON(const float* input_ptr, const float* k1, const float* upper = input_ptr + kKernelSize; for (; input_ptr < upper; ) { - m_input = vld1q_f32(input_ptr); + m_input = vld1q_f32((const float32_t *) input_ptr); input_ptr += 4; - m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32(k1)); + m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32((const float32_t *) k1)); k1 += 4; - m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32(k2)); + m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32((const float32_t *) k2)); k2 += 4; } diff --git a/media/webrtc/trunk/webrtc/common_types.h b/media/webrtc/trunk/webrtc/common_types.h index 7e88666ab60..425837a8ca3 100644 --- a/media/webrtc/trunk/webrtc/common_types.h +++ b/media/webrtc/trunk/webrtc/common_types.h @@ -341,7 +341,7 @@ enum NsModes // type of Noise Suppression kNsLowSuppression, // lowest suppression kNsModerateSuppression, kNsHighSuppression, - kNsVeryHighSuppression, // highest suppression + kNsVeryHighSuppression // highest suppression }; enum AgcModes // type of Automatic Gain Control @@ -366,7 +366,7 @@ enum EcModes // type of Echo Control kEcDefault, // platform default kEcConference, // conferencing default (aggressive AEC) kEcAec, // Acoustic Echo Cancellation - kEcAecm, // AEC mobile + kEcAecm // AEC mobile }; // AECM modes @@ -418,7 +418,7 @@ enum NetEqModes // NetEQ playout configurations kNetEqFax = 2, // Minimal buffer management. 
Inserts zeros for lost packets and during // buffer increases. - kNetEqOff = 3, + kNetEqOff = 3 }; enum OnHoldModes // On Hold direction @@ -432,7 +432,7 @@ enum AmrMode { kRfc3267BwEfficient = 0, kRfc3267OctetAligned = 1, - kRfc3267FileStorage = 2, + kRfc3267FileStorage = 2 }; // ================================================================== diff --git a/media/webrtc/trunk/webrtc/engine_configurations.h b/media/webrtc/trunk/webrtc/engine_configurations.h index 8294c9afc83..bd5eaa7510d 100644 --- a/media/webrtc/trunk/webrtc/engine_configurations.h +++ b/media/webrtc/trunk/webrtc/engine_configurations.h @@ -35,7 +35,9 @@ #define WEBRTC_CODEC_AVT // PCM16 is useful for testing and incurs only a small binary size cost. +#ifndef WEBRTC_CODEC_PCM16 #define WEBRTC_CODEC_PCM16 +#endif // iLBC, G.722, and Redundancy coding are excluded from Chromium and Mozilla // builds to reduce binary size. diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c index 04814b76734..e10a87191f4 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c @@ -15,10 +15,6 @@ #include "typedefs.h" -#ifdef WEBRTC_BIG_ENDIAN -#include "signal_processing_library.h" -#endif - #define HIGHEND 0xFF00 #define LOWEND 0xFF @@ -30,7 +26,7 @@ int16_t WebRtcPcm16b_EncodeW16(int16_t *speechIn16b, int16_t *speechOut16b) { #ifdef WEBRTC_BIG_ENDIAN - WEBRTC_SPL_MEMCPY_W16(speechOut16b, speechIn16b, len); + memcpy(speechOut16b, speechIn16b, len * sizeof(int16_t)); #else int i; for (i=0;i>1)); + memcpy(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1)); #else int i; int samples=len>>1; diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi index e86fbfc961a..1ffb7ae5175 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi @@ -44,12 +44,6 @@ 'acm_common_defs.h', 'acm_dtmf_playout.cc', 'acm_dtmf_playout.h', - 'acm_g722.cc', - 'acm_g722.h', - 'acm_g7221.cc', - 'acm_g7221.h', - 'acm_g7221c.cc', - 'acm_g7221c.h', 'acm_g729.cc', 'acm_g729.h', 'acm_g7291.cc', @@ -58,11 +52,6 @@ 'acm_generic_codec.h', 'acm_gsmfr.cc', 'acm_gsmfr.h', - 'acm_ilbc.cc', - 'acm_ilbc.h', - 'acm_isac.cc', - 'acm_isac.h', - 'acm_isac_macros.h', 'acm_opus.cc', 'acm_opus.h', 'acm_speex.cc', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi index 94c3bcb1e9a..d5aae6beb37 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi @@ -10,12 +10,6 @@ 'variables': { 'audio_coding_dependencies': [ 'CNG', - 'G711', - 'G722', - 'iLBC', - 'iSAC', - 'iSACFix', - 'PCM16B', 'NetEq', '<(webrtc_root)/common_audio/common_audio.gyp:common_audio', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', @@ -25,6 +19,57 @@ ['include_opus==1', { 'audio_coding_dependencies': ['webrtc_opus',], 'audio_coding_defines': ['WEBRTC_CODEC_OPUS',], + 'audio_coding_sources': [ + 'acm_opus.cc', + 'acm_opus.h', + ], + }], + ['include_g711==1', { + 
'audio_coding_dependencies': ['G711',], + 'audio_coding_defines': ['WEBRTC_CODEC_G711',], + 'audio_coding_sources': [ + 'acm_pcma.cc', + 'acm_pcma.h', + 'acm_pcmu.cc', + 'acm_pcmu.h', + ], + }], + ['include_g722==1', { + 'audio_coding_dependencies': ['G722',], + 'audio_coding_defines': ['WEBRTC_CODEC_G722',], + 'audio_coding_sources': [ + 'acm_g722.cc', + 'acm_g722.h', + 'acm_g7221.cc', + 'acm_g7221.h', + 'acm_g7221c.cc', + 'acm_g7221c.h', + ], + }], + ['include_ilbc==1', { + 'audio_coding_dependencies': ['iLBC',], + 'audio_coding_defines': ['WEBRTC_CODEC_ILBC',], + 'audio_coding_sources': [ + 'acm_ilbc.cc', + 'acm_ilbc.h', + ], + }], + ['include_isac==1', { + 'audio_coding_dependencies': ['iSAC', 'iSACFix',], + 'audio_coding_defines': ['WEBRTC_CODEC_ISAC', 'WEBRTC_CODEC_ISACFX',], + 'audio_coding_sources': [ + 'acm_isac.cc', + 'acm_isac.h', + 'acm_isac_macros.h', + ], + }], + ['include_pcm16b==1', { + 'audio_coding_dependencies': ['PCM16B',], + 'audio_coding_defines': ['WEBRTC_CODEC_PCM16',], + 'audio_coding_sources': [ + 'acm_pcm16b.cc', + 'acm_pcm16b.h', + ], }], ], }, @@ -50,14 +95,9 @@ ], }, 'sources': [ +# '<@(audio_coding_sources)', '../interface/audio_coding_module.h', '../interface/audio_coding_module_typedefs.h', - 'acm_amr.cc', - 'acm_amr.h', - 'acm_amrwb.cc', - 'acm_amrwb.h', - 'acm_celt.cc', - 'acm_celt.h', 'acm_cng.cc', 'acm_cng.h', 'acm_codec_database.cc', @@ -66,31 +106,13 @@ 'acm_dtmf_detection.h', 'acm_dtmf_playout.cc', 'acm_dtmf_playout.h', - 'acm_g722.cc', - 'acm_g722.h', - 'acm_g7221.cc', - 'acm_g7221.h', - 'acm_g7221c.cc', - 'acm_g7221c.h', - 'acm_g729.cc', - 'acm_g729.h', - 'acm_g7291.cc', - 'acm_g7291.h', 'acm_generic_codec.cc', 'acm_generic_codec.h', - 'acm_gsmfr.cc', - 'acm_gsmfr.h', - 'acm_ilbc.cc', - 'acm_ilbc.h', - 'acm_isac.cc', - 'acm_isac.h', - 'acm_isac_macros.h', 'acm_neteq.cc', 'acm_neteq.h', +# cheat until I get audio_coding_sources to work 'acm_opus.cc', 'acm_opus.h', - 'acm_speex.cc', - 'acm_speex.h', 'acm_pcm16b.cc', 'acm_pcm16b.h', 'acm_pcma.cc', diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h index 9390f2bf3f0..1f3b682ff56 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h @@ -69,6 +69,8 @@ * decoded signal is at 32 kHz. 
* NETEQ_ISAC_FB_CODEC Enable iSAC-FB * + * NETEQ_OPUS_CODEC Enable Opus + * * NETEQ_G722_CODEC Enable G.722 * * NETEQ_G729_CODEC Enable G.729 @@ -321,6 +323,7 @@ #define NETEQ_PCM16B_CODEC #define NETEQ_G711_CODEC #define NETEQ_ILBC_CODEC + #define NETEQ_OPUS_CODEC #define NETEQ_G729_CODEC #define NETEQ_G726_CODEC #define NETEQ_GSMFR_CODEC @@ -329,6 +332,7 @@ /* Wideband codecs */ #define NETEQ_WIDEBAND #define NETEQ_ISAC_CODEC + /*#define NETEQ_OPUS_CODEC define only once */ #define NETEQ_G722_CODEC #define NETEQ_G722_1_CODEC #define NETEQ_G729_1_CODEC @@ -337,13 +341,18 @@ /* Super wideband 32kHz codecs */ #define NETEQ_ISAC_SWB_CODEC + /*#define NETEQ_OPUS_CODEC*/ #define NETEQ_32KHZ_WIDEBAND #define NETEQ_G722_1C_CODEC #define NETEQ_CELT_CODEC + /*#define NETEQ_OPUS_CODEC*/ + + /* hack in 48 kHz support */ + #define NETEQ_48KHZ_WIDEBAND /* Super wideband 48kHz codecs */ #define NETEQ_48KHZ_WIDEBAND - #define NETEQ_OPUS_CODEC + /*#define NETEQ_OPUS_CODEC*/ #define NETEQ_ISAC_FB #endif diff --git a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/packet_buffer.c b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/packet_buffer.c index a542333cf87..5bfc642c506 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/packet_buffer.c +++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/packet_buffer.c @@ -678,6 +678,11 @@ int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID, codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */ codecBuffers = 30; /* Replicating the value for PCMu/a */ } + else if (codecID[i] == kDecoderOpus) + { + codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */ + codecBuffers = 30; /* ?? Codec supports down to 2.5-60 ms frames */ + } else if ((codecID[i] == kDecoderPCM16B) || (codecID[i] == kDecoderPCM16B_2ch)) { diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc index 33b7db983dd..7585f4e004e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc @@ -26,12 +26,13 @@ #include "webrtc/system_wrappers/interface/thread_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" +#include "AndroidJNIWrapper.h" + namespace webrtc { // TODO(leozwang): Refactor jni and the following global variables, a // good example is jni_helper in Chromium. JavaVM* AudioDeviceAndroidJni::globalJvm = NULL; -JNIEnv* AudioDeviceAndroidJni::globalJNIEnv = NULL; jobject AudioDeviceAndroidJni::globalContext = NULL; jclass AudioDeviceAndroidJni::globalScClass = NULL; @@ -45,62 +46,62 @@ jclass AudioDeviceAndroidJni::globalScClass = NULL; int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects( void* javaVM, - void* env, void* context) { - __android_log_print(ANDROID_LOG_DEBUG, "WEBRTC", "JNI:%s", __FUNCTION__); + return SetAndroidAudioDeviceObjects(javaVM, NULL, context); +} + +int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects( + void* javaVM, + void* null_env, + void* context) { + WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, -1, + "%s called", __FUNCTION__); // TODO(leozwang): Make this function thread-safe. 
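// [Editor's note] A minimal usage sketch for the overloads below, assuming the
// embedder cached the process JavaVM in JNI_OnLoad and holds a global ref to an
// android.content.Context (the g_* names are hypothetical, not part of this patch):
//
//   extern JavaVM* g_jvm;          // cached in JNI_OnLoad
//   extern jobject g_app_context;  // NewGlobalRef'd android.content.Context
//   webrtc::AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
//       g_jvm, g_app_context);     // two-arg form; JNIEnv is resolved via GetEnv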
globalJvm = reinterpret_cast<JavaVM*>(javaVM); - if (env) { - globalJNIEnv = reinterpret_cast<JNIEnv*>(env); + JNIEnv* env = NULL; + + // Check if we already got a reference + if (globalJvm && !globalScClass) { + if (globalJvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1, + "%s: could not get Java environment", __FUNCTION__); + return -1; + } + globalJvm->AttachCurrentThread(&env, NULL); + // Get java class type (note path to class packet). - jclass javaScClassLocal = globalJNIEnv->FindClass( - "org/webrtc/voiceengine/WebRTCAudioDevice"); - if (!javaScClassLocal) { + globalScClass = jsjni_GetGlobalClassRef(AudioCaptureClass); + if (!globalScClass) { WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, "%s: could not find java class", __FUNCTION__); return -1; // exception thrown } - // Create a global reference to the class (to tell JNI that we are - // referencing it after this function has returned). - globalScClass = reinterpret_cast<jclass> ( - globalJNIEnv->NewGlobalRef(javaScClassLocal)); - if (!globalScClass) { - WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, - "%s: could not create reference", __FUNCTION__); - return -1; - } - - globalContext = globalJNIEnv->NewGlobalRef( + globalContext = env->NewGlobalRef( reinterpret_cast<jobject>(context)); if (!globalContext) { WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, "%s: could not create context reference", __FUNCTION__); return -1; } - - // Delete local class ref, we only use the global ref - globalJNIEnv->DeleteLocalRef(javaScClassLocal); } else { // User is resetting the env variable WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, "%s: env is NULL, assuming deinit", __FUNCTION__); - if (!globalJNIEnv) { + if (!env) { WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1, "%s: saved env already NULL", __FUNCTION__); return 0; } - globalJNIEnv->DeleteGlobalRef(globalScClass); + env->DeleteGlobalRef(globalScClass); globalScClass = reinterpret_cast<jclass>(NULL); - globalJNIEnv->DeleteGlobalRef(globalContext); + env->DeleteGlobalRef(globalContext); globalContext = reinterpret_cast<jobject>(NULL); - - globalJNIEnv = reinterpret_cast<JNIEnv*>(NULL); } return 0; @@ -140,8 +141,8 @@ AudioDeviceAndroidJni::AudioDeviceAndroidJni(const int32_t id) : _playError(0), _recWarning(0), _recError(0), _delayPlayout(0), _delayRecording(0), _AGC(false), - _samplingFreqIn((N_REC_SAMPLES_PER_SEC/1000)), - _samplingFreqOut((N_PLAY_SAMPLES_PER_SEC/1000)), + _samplingFreqIn((N_REC_SAMPLES_PER_SEC)), + _samplingFreqOut((N_PLAY_SAMPLES_PER_SEC)), _maxSpeakerVolume(0), _loudSpeakerOn(false), _recAudioSource(1), // 1 is AudioSource.MIC which is our default @@ -1385,17 +1386,10 @@ int32_t AudioDeviceAndroidJni::InitPlayout() // get the method ID jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback", "(I)I"); - - int samplingFreq = 44100; - if (_samplingFreqOut != 44) - { - samplingFreq = _samplingFreqOut * 1000; - } - int retVal = -1; // Call java sc object method - jint res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq); + jint res = env->CallIntMethod(_javaScObj, initPlaybackID, _samplingFreqOut); if (res < 0) { WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, @@ -1404,7 +1398,7 @@ int32_t AudioDeviceAndroidJni::InitPlayout() else { // Set the audio device buffer sampling rate - _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut * 1000); + _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut); _playIsInitialized = true; retVal = 0; } @@ -1490,18 +1484,11 @@ int32_t AudioDeviceAndroidJni::InitRecording() //
get the method ID jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording", "(II)I"); - - int samplingFreq = 44100; - if (_samplingFreqIn != 44) - { - samplingFreq = _samplingFreqIn * 1000; - } - int retVal = -1; // call java sc object method jint res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource, - samplingFreq); + _samplingFreqIn); if (res < 0) { WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, @@ -1510,10 +1497,10 @@ int32_t AudioDeviceAndroidJni::InitRecording() else { // Set the audio device buffer sampling rate - _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn * 1000); + _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn); // the init rec function returns a fixed delay - _delayRecording = res / _samplingFreqIn; + _delayRecording = (res * 1000) / _samplingFreqIn; _recIsInitialized = true; retVal = 0; @@ -2031,14 +2018,7 @@ int32_t AudioDeviceAndroidJni::SetRecordingSampleRate( } // set the recording sample rate to use - if (samplesPerSec == 44100) - { - _samplingFreqIn = 44; - } - else - { - _samplingFreqIn = samplesPerSec / 1000; - } + _samplingFreqIn = samplesPerSec; // Update the AudioDeviceBuffer _ptrAudioBuffer->SetRecordingSampleRate(samplesPerSec); @@ -2062,14 +2042,7 @@ int32_t AudioDeviceAndroidJni::SetPlayoutSampleRate( } // set the playout sample rate to use - if (samplesPerSec == 44100) - { - _samplingFreqOut = 44; - } - else - { - _samplingFreqOut = samplesPerSec / 1000; - } + _samplingFreqOut = samplesPerSec; // Update the AudioDeviceBuffer _ptrAudioBuffer->SetPlayoutSampleRate(samplesPerSec); @@ -2211,7 +2184,7 @@ int32_t AudioDeviceAndroidJni::InitJavaResources() } WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, - "construct object", __FUNCTION__); + "%s: construct object", __FUNCTION__); // construct the object jobject javaScObjLocal = env->NewObject(_javaScClass, cid); @@ -2423,11 +2396,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate() if (_samplingFreqIn > 0) { // read the configured sampling rate - samplingFreq = 44100; - if (_samplingFreqIn != 44) - { - samplingFreq = _samplingFreqIn * 1000; - } + samplingFreq = _samplingFreqIn; WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, " Trying configured recording sampling rate %d", samplingFreq); @@ -2468,14 +2437,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate() } // set the recording sample rate to use - if (samplingFreq == 44100) - { - _samplingFreqIn = 44; - } - else - { - _samplingFreqIn = samplingFreq / 1000; - } + _samplingFreqIn = samplingFreq; WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "Recording sample rate set to (%d)", _samplingFreqIn); @@ -2499,11 +2461,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate() if (_samplingFreqOut > 0) { // read the configured sampling rate - samplingFreq = 44100; - if (_samplingFreqOut != 44) - { - samplingFreq = _samplingFreqOut * 1000; - } + samplingFreq = _samplingFreqOut; WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, " Trying configured playback sampling rate %d", samplingFreq); @@ -2557,15 +2515,7 @@ int32_t AudioDeviceAndroidJni::InitSampleRate() } // set the playback sample rate to use - if (samplingFreq == 44100) - { - _samplingFreqOut = 44; - } - else - { - _samplingFreqOut = samplingFreq / 1000; - } - + _samplingFreqOut = samplingFreq; WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "Playback sample rate set to (%d)", _samplingFreqOut); @@ -2678,7 +2628,7 @@ bool AudioDeviceAndroidJni::PlayThreadProcess() if (_playing) { int8_t playBuffer[2 * 480]; // Max 10 ms @ 48 kHz / 16 bit - 
uint32_t samplesToPlay = _samplingFreqOut * 10; + uint32_t samplesToPlay = _samplingFreqOut / 100; // ask for new PCM data to be played out using the AudioDeviceBuffer // ensure that this callback is executed without taking the @@ -2723,7 +2673,7 @@ bool AudioDeviceAndroidJni::PlayThreadProcess() else if (res > 0) { // we are not recording and have got a delay value from playback - _delayPlayout = res / _samplingFreqOut; + _delayPlayout = (res * 1000) / _samplingFreqOut; } // If 0 is returned we are recording and then play delay is updated // in RecordProcess @@ -2821,7 +2771,7 @@ bool AudioDeviceAndroidJni::RecThreadProcess() if (_recording) { - uint32_t samplesToRec = _samplingFreqIn * 10; + uint32_t samplesToRec = _samplingFreqIn / 100; // Call java sc object method to record data to direct buffer // Will block until data has been recorded (see java sc class), @@ -2838,7 +2788,7 @@ bool AudioDeviceAndroidJni::RecThreadProcess() } else { - _delayPlayout = playDelayInSamples / _samplingFreqOut; + _delayPlayout = (playDelayInSamples * 1000) / _samplingFreqOut; } Lock(); diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h index 677d3756643..7eb57cbd7f3 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h @@ -20,19 +20,12 @@ #include // For accessing AudioDeviceAndroid java class +#define AudioCaptureClass "org/webrtc/voiceengine/WebRTCAudioDevice" + namespace webrtc { class EventWrapper; -const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz -const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz - -const uint32_t N_REC_CHANNELS = 1; // default is mono recording -const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout - -const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz - - class ThreadWrapper; class AudioDeviceAndroidJni : public AudioDeviceGeneric { @@ -40,6 +33,9 @@ class AudioDeviceAndroidJni : public AudioDeviceGeneric { AudioDeviceAndroidJni(const int32_t id); ~AudioDeviceAndroidJni(); + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, + void* context); + static int32_t SetAndroidAudioDeviceObjects(void* javaVM, void* env, void* context); @@ -158,6 +154,14 @@ class AudioDeviceAndroidJni : public AudioDeviceGeneric { virtual int32_t SetLoudspeakerStatus(bool enable); virtual int32_t GetLoudspeakerStatus(bool& enable) const; + static const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz + static const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz + + static const uint32_t N_REC_CHANNELS = 1; // default is mono recording + static const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout + + static const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz + private: // Lock void Lock() { diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h index 6f85e72df9b..d8d7bda7a3c 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager_jni.h @@ -36,6 +36,7 @@ class AudioManagerJni { // It has to be called for this class' APIs to be successful. 
Calling // ClearAndroidAudioDeviceObjects will prevent this class' APIs to be called // successfully if SetAndroidAudioDeviceObjects is not called after it. + static void SetAndroidAudioDeviceObjects(void* jvm, void* context); static void SetAndroidAudioDeviceObjects(void* jvm, void* env, void* context); // This function must be called when the AudioManagerJni class is no diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc index 9f58205a95d..59a1ce759e1 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc @@ -11,6 +11,7 @@ #include "webrtc/modules/audio_device/android/opensles_input.h" #include +#include #include "webrtc/modules/audio_device/android/single_rw_fifo.h" #include "webrtc/modules/audio_device/audio_device_buffer.h" @@ -65,7 +66,8 @@ OpenSlesInput::OpenSlesInput( active_queue_(0), rec_sampling_rate_(0), agc_enabled_(false), - recording_delay_(0) { + recording_delay_(0), + opensles_lib_(NULL) { } OpenSlesInput::~OpenSlesInput() { @@ -74,15 +76,41 @@ OpenSlesInput::~OpenSlesInput() { int32_t OpenSlesInput::Init() { assert(!initialized_); + /* Try to dynamically open the OpenSLES library */ + opensles_lib_ = dlopen("libOpenSLES.so", RTLD_LAZY); + if (!opensles_lib_) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, + " failed to dlopen OpenSLES library"); + return -1; + } + + f_slCreateEngine = (slCreateEngine_t)dlsym(opensles_lib_, "slCreateEngine"); + SL_IID_ENGINE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ENGINE"); + SL_IID_BUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_BUFFERQUEUE"); + SL_IID_ANDROIDCONFIGURATION_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDCONFIGURATION"); + SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDSIMPLEBUFFERQUEUE"); + SL_IID_RECORD_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_RECORD"); + + if (!f_slCreateEngine || + !SL_IID_ENGINE_ || + !SL_IID_BUFFERQUEUE_ || + !SL_IID_ANDROIDCONFIGURATION_ || + !SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ || + !SL_IID_RECORD_) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, + " failed to find OpenSLES function"); + return -1; + } + // Set up OpenSL engine. - OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0, + OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0, NULL, NULL), -1); OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_, SL_BOOLEAN_FALSE), -1); OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_, - SL_IID_ENGINE, + SL_IID_ENGINE_, &sles_engine_itf_), -1); @@ -101,6 +129,7 @@ int32_t OpenSlesInput::Terminate() { initialized_ = false; mic_initialized_ = false; rec_initialized_ = false; + dlclose(opensles_lib_); return 0; } @@ -270,8 +299,12 @@ void OpenSlesInput::UpdateRecordingDelay() { } void OpenSlesInput::UpdateSampleRate() { +#if !defined(WEBRTC_GONK) rec_sampling_rate_ = audio_manager_.low_latency_supported() ? audio_manager_.native_output_sample_rate() : kDefaultSampleRate; +#else + rec_sampling_rate_ = kDefaultSampleRate; +#endif } void OpenSlesInput::CalculateNumFifoBuffersNeeded() { @@ -345,7 +378,7 @@ bool OpenSlesInput::CreateAudioRecorder() { // Note the interfaces still need to be initialized. This only tells OpenSl // that the interfaces will be needed at some point. 
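// [Editor's note] The dlsym() lookups in Init() above repeat the same
// load-and-check pattern for every interface ID; a small checked helper is
// one way to factor it (sketch only, not part of this patch):
//
//   static bool LoadIID(void* lib, const char* name, SLInterfaceID* out) {
//     void* sym = dlsym(lib, name);  // returns NULL if the symbol is absent
//     if (!sym) return false;
//     *out = *reinterpret_cast<SLInterfaceID*>(sym);  // IDs are exported data
//     return true;
//   }
//
// e.g. if (!LoadIID(opensles_lib_, "SL_IID_RECORD", &SL_IID_RECORD_)) return -1;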
const SLInterfaceID id[kNumInterfaces] = { - SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION }; + SL_IID_ANDROIDSIMPLEBUFFERQUEUE_, SL_IID_ANDROIDCONFIGURATION_ }; const SLboolean req[kNumInterfaces] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE }; OPENSL_RETURN_ON_FAILURE( @@ -363,13 +396,13 @@ bool OpenSlesInput::CreateAudioRecorder() { SL_BOOLEAN_FALSE), false); OPENSL_RETURN_ON_FAILURE( - (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD, + (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD_, static_cast<void*>(&sles_recorder_itf_)), false); OPENSL_RETURN_ON_FAILURE( (*sles_recorder_)->GetInterface( sles_recorder_, - SL_IID_ANDROIDSIMPLEBUFFERQUEUE, + SL_IID_ANDROIDSIMPLEBUFFERQUEUE_, static_cast<void*>(&sles_recorder_sbq_itf_)), false); return true; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h index ca8a6f0886d..e718c511ad7 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h @@ -15,7 +15,9 @@ #include <SLES/OpenSLES.h> #include <SLES/OpenSLES_Android.h> +#if !defined(WEBRTC_GONK) #include "webrtc/modules/audio_device/android/audio_manager_jni.h" +#endif #include "webrtc/modules/audio_device/android/low_latency_event.h" #include "webrtc/modules/audio_device/android/opensles_common.h" #include "webrtc/modules/audio_device/include/audio_device.h" @@ -165,8 +167,10 @@ class OpenSlesInput { // Thread-compatible. bool CbThreadImpl(); +#if !defined(WEBRTC_GONK) // Java API handle AudioManagerJni audio_manager_; +#endif int id_; webrtc_opensl::PlayoutDelayProvider* delay_provider_; @@ -212,6 +216,21 @@ class OpenSlesInput { // Audio status uint16_t recording_delay_; + + // dlopen for OpenSLES + void *opensles_lib_; + typedef SLresult (*slCreateEngine_t)(SLObjectItf *, + SLuint32, + const SLEngineOption *, + SLuint32, + const SLInterfaceID *, + const SLboolean *); + slCreateEngine_t f_slCreateEngine; + SLInterfaceID SL_IID_ENGINE_; + SLInterfaceID SL_IID_BUFFERQUEUE_; + SLInterfaceID SL_IID_ANDROIDCONFIGURATION_; + SLInterfaceID SL_IID_ANDROIDSIMPLEBUFFERQUEUE_; + SLInterfaceID SL_IID_RECORD_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc index 882440e4fb4..84bff81060b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.cc @@ -11,6 +11,7 @@ #include "webrtc/modules/audio_device/android/opensles_output.h" #include <assert.h> +#include <dlfcn.h> #include "webrtc/modules/audio_device/android/fine_audio_buffer.h" #include "webrtc/modules/audio_device/android/single_rw_fifo.h" @@ -65,7 +66,8 @@ OpenSlesOutput::OpenSlesOutput(const int32_t id) speaker_sampling_rate_(kDefaultSampleRate), buffer_size_samples_(0), buffer_size_bytes_(0), - playout_delay_(0) { + playout_delay_(0), + opensles_lib_(NULL) { } OpenSlesOutput::~OpenSlesOutput() { @@ -74,15 +76,43 @@ int32_t OpenSlesOutput::Init() { assert(!initialized_); + /* Try to dynamically open the OpenSLES library */ + opensles_lib_ = dlopen("libOpenSLES.so", RTLD_LAZY); + if (!opensles_lib_) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, + " failed to dlopen OpenSLES library"); + return -1; + } + + f_slCreateEngine = (slCreateEngine_t)dlsym(opensles_lib_, "slCreateEngine"); + SL_IID_ENGINE_
= *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ENGINE"); + SL_IID_BUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_BUFFERQUEUE"); + SL_IID_ANDROIDCONFIGURATION_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDCONFIGURATION"); + SL_IID_PLAY_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_PLAY"); + SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDSIMPLEBUFFERQUEUE"); + SL_IID_VOLUME_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_VOLUME"); + + if (!f_slCreateEngine || + !SL_IID_ENGINE_ || + !SL_IID_BUFFERQUEUE_ || + !SL_IID_ANDROIDCONFIGURATION_ || + !SL_IID_PLAY_ || + !SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ || + !SL_IID_VOLUME_) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, + " failed to find OpenSLES function"); + return -1; + } + // Set up OpenSl engine. - OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0, + OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0, NULL, NULL), -1); OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_, SL_BOOLEAN_FALSE), -1); OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_, - SL_IID_ENGINE, + SL_IID_ENGINE_, &sles_engine_itf_), -1); // Set up OpenSl output mix. @@ -114,6 +144,7 @@ int32_t OpenSlesOutput::Terminate() { initialized_ = false; speaker_initialized_ = false; play_initialized_ = false; + dlclose(opensles_lib_); return 0; } @@ -302,6 +333,7 @@ void OpenSlesOutput::UpdatePlayoutDelay() { } bool OpenSlesOutput::SetLowLatency() { +#if !defined(WEBRTC_GONK) if (!audio_manager_.low_latency_supported()) { return false; } @@ -310,6 +342,9 @@ bool OpenSlesOutput::SetLowLatency() { speaker_sampling_rate_ = audio_manager_.native_output_sample_rate(); assert(speaker_sampling_rate_ > 0); return true; +#else + return false; +#endif } void OpenSlesOutput::CalculateNumFifoBuffersNeeded() { @@ -395,7 +430,7 @@ bool OpenSlesOutput::CreateAudioPlayer() { // Note the interfaces still need to be initialized. This only tells OpenSl // that the interfaces will be needed at some point. 
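// [Editor's note] The trailing-underscore IDs used below are the member
// copies populated by dlsym() in Init(); they stand in for the libOpenSLES
// globals of the same name so this file no longer links against the library
// directly.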
SLInterfaceID ids[kNumInterfaces] = { - SL_IID_BUFFERQUEUE, SL_IID_VOLUME, SL_IID_ANDROIDCONFIGURATION }; + SL_IID_BUFFERQUEUE_, SL_IID_VOLUME_, SL_IID_ANDROIDCONFIGURATION_ }; SLboolean req[kNumInterfaces] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE }; OPENSL_RETURN_ON_FAILURE( @@ -408,11 +443,11 @@ bool OpenSlesOutput::CreateAudioPlayer() { SL_BOOLEAN_FALSE), false); OPENSL_RETURN_ON_FAILURE( - (*sles_player_)->GetInterface(sles_player_, SL_IID_PLAY, + (*sles_player_)->GetInterface(sles_player_, SL_IID_PLAY_, &sles_player_itf_), false); OPENSL_RETURN_ON_FAILURE( - (*sles_player_)->GetInterface(sles_player_, SL_IID_BUFFERQUEUE, + (*sles_player_)->GetInterface(sles_player_, SL_IID_BUFFERQUEUE_, &sles_player_sbq_itf_), false); return true; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h index bf20cf6487f..0fe9a454b35 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_output.h @@ -15,7 +15,9 @@ #include #include +#if !defined(WEBRTC_GONK) #include "webrtc/modules/audio_device/android/audio_manager_jni.h" +#endif #include "webrtc/modules/audio_device/android/low_latency_event.h" #include "webrtc/modules/audio_device/android/opensles_common.h" #include "webrtc/modules/audio_device/include/audio_device_defines.h" @@ -181,8 +183,10 @@ class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider { // Thread-compatible. bool CbThreadImpl(); +#if !defined(WEBRTC_GONK) // Java API handle AudioManagerJni audio_manager_; +#endif int id_; bool initialized_; @@ -229,6 +233,22 @@ class OpenSlesOutput : public webrtc_opensl::PlayoutDelayProvider { // Audio status uint16_t playout_delay_; + + // dlopen for OpenSLES + void *opensles_lib_; + typedef SLresult (*slCreateEngine_t)(SLObjectItf *, + SLuint32, + const SLEngineOption *, + SLuint32, + const SLInterfaceID *, + const SLboolean *); + slCreateEngine_t f_slCreateEngine; + SLInterfaceID SL_IID_ENGINE_; + SLInterfaceID SL_IID_BUFFERQUEUE_; + SLInterfaceID SL_IID_ANDROIDCONFIGURATION_; + SLInterfaceID SL_IID_PLAY_; + SLInterfaceID SL_IID_ANDROIDSIMPLEBUFFERQUEUE_; + SLInterfaceID SL_IID_VOLUME_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi index 3df12eb76b4..9a24f931fdf 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi @@ -46,11 +46,19 @@ 'dummy/audio_device_utility_dummy.h', ], 'conditions': [ - ['OS=="linux"', { + ['build_with_mozilla==1', { + 'include_dirs': [ + '$(DIST)/include', + ], + 'cflags_mozilla': [ + '$(NSPR_CFLAGS)', + ], + }], + ['OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1', { 'include_dirs': [ 'linux', ], - }], # OS==linux + }], # OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1 ['OS=="ios"', { 'include_dirs': [ 'ios', @@ -68,9 +76,22 @@ }], ['OS=="android"', { 'include_dirs': [ + '$(topsrcdir)/widget/android', 'android', ], }], # OS==android + ['moz_widget_toolkit_gonk==1', { + 'include_dirs': [ + '$(ANDROID_SOURCE)/frameworks/wilhelm/include', + '$(ANDROID_SOURCE)/system/media/wilhelm/include', + 'android', + ], + }], # moz_widget_toolkit_gonk==1 + ['enable_android_opensl==1', { + 'include_dirs': [ + 'opensl', + ], + }], # enable_android_opensl 
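# [Editor's note] Sketch of how a build config might drive the new flags
# referenced in this file (flag names as used above; the default values shown
# are an assumption, not part of this patch):
#   'variables': {
#     'include_alsa_audio%': 1,
#     'include_pulse_audio%': 1,
#     'enable_android_opensl%': 0,
#   },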
['include_internal_audio_device==0', { 'defines': [ 'WEBRTC_DUMMY_AUDIO_BUILD', @@ -78,14 +99,8 @@ }], ['include_internal_audio_device==1', { 'sources': [ - 'linux/alsasymboltable_linux.cc', - 'linux/alsasymboltable_linux.h', - 'linux/audio_device_alsa_linux.cc', - 'linux/audio_device_alsa_linux.h', 'linux/audio_device_utility_linux.cc', 'linux/audio_device_utility_linux.h', - 'linux/audio_mixer_manager_alsa_linux.cc', - 'linux/audio_mixer_manager_alsa_linux.h', 'linux/latebindingsymboltable_linux.cc', 'linux/latebindingsymboltable_linux.h', 'ios/audio_device_ios.cc', @@ -109,11 +124,17 @@ 'win/audio_device_utility_win.h', 'win/audio_mixer_manager_win.cc', 'win/audio_mixer_manager_win.h', - 'android/audio_device_utility_android.cc', - 'android/audio_device_utility_android.h', ], 'conditions': [ ['OS=="android"', { + 'sources': [ + 'opensl/audio_manager_jni.cc', + 'opensl/audio_manager_jni.h', + 'android/audio_device_jni_android.cc', + 'android/audio_device_jni_android.h', + ], + }], + ['OS=="android" or moz_widget_toolkit_gonk==1', { 'link_settings': { 'libraries': [ '-llog', @@ -123,54 +144,70 @@ 'conditions': [ ['enable_android_opensl==1', { 'sources': [ - 'android/audio_device_opensles_android.cc', - 'android/audio_device_opensles_android.h', - 'android/audio_manager_jni.cc', - 'android/audio_manager_jni.h', - 'android/fine_audio_buffer.cc', - 'android/fine_audio_buffer.h', - 'android/low_latency_event_posix.cc', - 'android/low_latency_event.h', - 'android/opensles_common.cc', - 'android/opensles_common.h', - 'android/opensles_input.cc', - 'android/opensles_input.h', - 'android/opensles_output.cc', - 'android/opensles_output.h', - 'android/single_rw_fifo.cc', - 'android/single_rw_fifo.h', + 'opensl/audio_device_opensles.cc', + 'opensl/audio_device_opensles.h', + 'opensl/fine_audio_buffer.cc', + 'opensl/fine_audio_buffer.h', + 'opensl/low_latency_event_posix.cc', + 'opensl/low_latency_event.h', + 'opensl/opensles_common.cc', + 'opensl/opensles_common.h', + 'opensl/opensles_input.cc', + 'opensl/opensles_input.h', + 'opensl/opensles_output.cc', + 'opensl/opensles_output.h', + 'opensl/single_rw_fifo.cc', + 'opensl/single_rw_fifo.h', + 'shared/audio_device_utility_shared.cc', + 'shared/audio_device_utility_shared.h', ], }, { 'sources': [ - 'android/audio_device_jni_android.cc', - 'android/audio_device_jni_android.h', + 'shared/audio_device_utility_shared.cc', + 'shared/audio_device_utility_shared.h', + 'android/audio_device_jni_android.cc', + 'android/audio_device_jni_android.h', ], }], ], }], ['OS=="linux"', { - 'defines': [ - 'LINUX_ALSA', - ], 'link_settings': { 'libraries': [ '-ldl','-lX11', ], }, - 'conditions': [ - ['include_pulse_audio==1', { - 'defines': [ - 'LINUX_PULSE', - ], - 'sources': [ - 'linux/audio_device_pulse_linux.cc', - 'linux/audio_device_pulse_linux.h', - 'linux/audio_mixer_manager_pulse_linux.cc', - 'linux/audio_mixer_manager_pulse_linux.h', - 'linux/pulseaudiosymboltable_linux.cc', - 'linux/pulseaudiosymboltable_linux.h', - ], - }], + }], + ['include_alsa_audio==1', { + 'cflags_mozilla': [ + '$(MOZ_ALSA_CFLAGS)', + ], + 'defines': [ + 'LINUX_ALSA', + ], + 'sources': [ + 'linux/alsasymboltable_linux.cc', + 'linux/alsasymboltable_linux.h', + 'linux/audio_device_alsa_linux.cc', + 'linux/audio_device_alsa_linux.h', + 'linux/audio_mixer_manager_alsa_linux.cc', + 'linux/audio_mixer_manager_alsa_linux.h', + ], + }], + ['include_pulse_audio==1', { + 'cflags_mozilla': [ + '$(MOZ_PULSEAUDIO_CFLAGS)', + ], + 'defines': [ + 'LINUX_PULSE', + ], + 'sources': [ + 
'linux/audio_device_pulse_linux.cc', + 'linux/audio_device_pulse_linux.h', + 'linux/audio_mixer_manager_pulse_linux.cc', + 'linux/audio_mixer_manager_pulse_linux.h', + 'linux/pulseaudiosymboltable_linux.cc', + 'linux/pulseaudiosymboltable_linux.h', ], }], ['OS=="mac" or OS=="ios"', { @@ -279,4 +316,3 @@ }], # include_tests ], } - diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc index 9acaf80480a..0e7543c558d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc @@ -16,26 +16,34 @@ #include #include -#if defined(_WIN32) +#if defined(WEBRTC_DUMMY_AUDIO_BUILD) +// do not include platform specific headers +#elif defined(_WIN32) #include "audio_device_utility_win.h" #include "audio_device_wave_win.h" #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD) #include "audio_device_core_win.h" #endif #elif defined(WEBRTC_ANDROID_OPENSLES) +// ANDROID and GONK #include + #include #include "audio_device_utility_android.h" #include "audio_device_opensles_android.h" +#if !defined(WEBRTC_GONK) + #include "audio_device_jni_android.h" +#endif #elif defined(WEBRTC_ANDROID) +// GONK only supports opensles; android can use that or jni #include #include "audio_device_utility_android.h" #include "audio_device_jni_android.h" -#elif defined(WEBRTC_LINUX) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) #include "audio_device_utility_linux.h" - #if defined(LINUX_ALSA) +#if defined(LINUX_ALSA) #include "audio_device_alsa_linux.h" - #endif - #if defined(LINUX_PULSE) +#endif +#if defined(LINUX_PULSE) #include "audio_device_pulse_linux.h" #endif #elif defined(WEBRTC_IOS) @@ -159,7 +167,7 @@ int32_t AudioDeviceModuleImpl::CheckPlatform() #elif defined(WEBRTC_ANDROID) platform = kPlatformAndroid; WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is ANDROID"); -#elif defined(WEBRTC_LINUX) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) platform = kPlatformLinux; WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is LINUX"); #elif defined(WEBRTC_IOS) @@ -259,41 +267,41 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() // Create the *Android OpenSLES* implementation of the Audio Device // #if defined(WEBRTC_ANDROID_OPENSLES) - if (audioLayer == kPlatformDefaultAudio) - { - // Create *Android OpenELSE Audio* implementation - ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id()); - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, - "Android OpenSLES Audio APIs will be utilized"); + // Check if the OpenSLES library is available before going further. + void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY); + if (opensles_lib) { + // That worked, close for now and proceed normally. + dlclose(opensles_lib); + if (audioLayer == kPlatformDefaultAudio) + { + // Create *Android OpenSLES Audio* implementation + ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id()); + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + "Android OpenSLES Audio APIs will be utilized"); + } + } + +#if !defined(WEBRTC_GONK) + // Fall back to this case if on Android 2.2/OpenSLES not available. 
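// [Editor's note] The dlopen() probe above only tests for presence: the
// handle is dlclose()d immediately because AudioDeviceAndroidOpenSLES::Init()
// re-opens the library and resolves the symbols it needs. If the probe failed,
// or the OpenSLES device was not selected, ptrAudioDevice is still NULL here
// and the JNI backend is created instead.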
+ if (ptrAudioDevice == NULL) { + // Create the *Android Java* implementation of the Audio Device + if (audioLayer == kPlatformDefaultAudio) + { + // Create *Android JNI Audio* implementation + ptrAudioDevice = new AudioDeviceAndroidJni(Id()); + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized"); + } } if (ptrAudioDevice != NULL) { - // Create the Android implementation of the Device Utility. - ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id()); + // Create the Android implementation of the Device Utility. + ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id()); } - // END #if defined(WEBRTC_ANDROID_OPENSLES) - - // Create the *Android Java* implementation of the Audio Device - // -#elif defined(WEBRTC_ANDROID) - if (audioLayer == kPlatformDefaultAudio) - { - // Create *Android JNI Audio* implementation - ptrAudioDevice = new AudioDeviceAndroidJni(Id()); - WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized"); - } - - if (ptrAudioDevice != NULL) - { - // Create the Android implementation of the Device Utility. - ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id()); - } - // END #if defined(WEBRTC_ANDROID) - +#endif // Create the *Linux* implementation of the Audio Device // -#elif defined(WEBRTC_LINUX) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) if ((audioLayer == kLinuxPulseAudio) || (audioLayer == kPlatformDefaultAudio)) { #if defined(LINUX_PULSE) @@ -339,7 +347,7 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() // ptrAudioDeviceUtility = new AudioDeviceUtilityLinux(Id()); } -#endif // #if defined(WEBRTC_LINUX) +#endif // #if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) // Create the *iPhone* implementation of the Audio Device // diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_utility.cc b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_utility.cc index b6c5c482536..6037a153a82 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_utility.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_utility.cc @@ -46,7 +46,7 @@ bool AudioDeviceUtility::StringCompare( } // namespace webrtc -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) // ============================================================================ // Linux & Mac @@ -109,4 +109,4 @@ bool AudioDeviceUtility::StringCompare( } // namespace webrtc -#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.cc b/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.cc index bad3915f14f..34b335c1857 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.cc @@ -1332,7 +1332,7 @@ int32_t AudioDeviceIPhone::InitPlayOrRecord() { // todo: Add 48 kHz (increase buffer sizes). Other fs? 
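// [Editor's note] The range checks below absorb Float64 jitter in the rate
// CoreAudio reports. An equivalent, more compact form would round once and
// compare exactly (sketch only, not part of this patch):
//
//   uint32_t fs = static_cast<uint32_t>(playoutDesc.mSampleRate + 0.5);
//   if (fs == 44100 || fs == 16000 || fs == 8000) {
//     _adbSampFreq = fs;
//   }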
if ((playoutDesc.mSampleRate > 44090.0) && (playoutDesc.mSampleRate < 44110.0)) { - _adbSampFreq = 44000; + _adbSampFreq = 44100; } else if ((playoutDesc.mSampleRate > 15990.0) && (playoutDesc.mSampleRate < 16010.0)) { _adbSampFreq = 16000; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.h b/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.h index fdaf94d6a60..5a0b364db1a 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/ios/audio_device_ios.h @@ -19,8 +19,8 @@ namespace webrtc { class ThreadWrapper; -const uint32_t N_REC_SAMPLES_PER_SEC = 44000; -const uint32_t N_PLAY_SAMPLES_PER_SEC = 44000; +const uint32_t N_REC_SAMPLES_PER_SEC = 44100; +const uint32_t N_PLAY_SAMPLES_PER_SEC = 44100; const uint32_t N_REC_CHANNELS = 1; // default is mono recording const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc index 8181fe5428d..890b25aab2b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc @@ -19,6 +19,13 @@ #include "webrtc/system_wrappers/interface/thread_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" +#include "Latency.h" + +#define LOG_FIRST_CAPTURE(x) LogTime(AsyncLatencyLogger::AudioCaptureBase, \ + reinterpret_cast(x), 0) +#define LOG_CAPTURE_FRAMES(x, frames) LogLatency(AsyncLatencyLogger::AudioCapture, \ + reinterpret_cast(x), frames) + webrtc_adm_linux_alsa::AlsaSymbolTable AlsaSymbolTable; // Accesses ALSA functions through our late-binding symbol table instead of @@ -96,6 +103,7 @@ AudioDeviceLinuxALSA::AudioDeviceLinuxALSA(const int32_t id) : _playBufType(AudioDeviceModule::kFixedBufferSize), _initialized(false), _recording(false), + _firstRecord(true), _playing(false), _recIsInitialized(false), _playIsInitialized(false), @@ -986,7 +994,8 @@ int32_t AudioDeviceLinuxALSA::RecordingDeviceName( memset(guid, 0, kAdmMaxGuidSize); } - return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize); + return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize, + guid, kAdmMaxGuidSize); } int16_t AudioDeviceLinuxALSA::RecordingDevices() @@ -1448,6 +1457,7 @@ int32_t AudioDeviceLinuxALSA::StartRecording() } // RECORDING const char* threadName = "webrtc_audio_module_capture_thread"; + _firstRecord = true; _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this, kRealtimePriority, @@ -1634,6 +1644,17 @@ int32_t AudioDeviceLinuxALSA::StartPlayout() return -1; } + int errVal = LATE(snd_pcm_prepare)(_handlePlayout); + if (errVal < 0) + { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, + " playout snd_pcm_prepare failed (%s)\n", + LATE(snd_strerror)(errVal)); + // just log error + // if snd_pcm_open fails will return -1 + } + + unsigned int threadID(0); if (!_ptrThreadPlay->Start(threadID)) { @@ -1648,16 +1669,6 @@ int32_t AudioDeviceLinuxALSA::StartPlayout() } _playThreadID = threadID; - int errVal = LATE(snd_pcm_prepare)(_handlePlayout); - if (errVal < 0) - { - WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, - " playout snd_pcm_prepare failed (%s)\n", - LATE(snd_strerror)(errVal)); - // just log error - // if snd_pcm_open fails will return -1 - } - return 0; } @@ -1829,7 +1840,9 @@ int32_t 
AudioDeviceLinuxALSA::GetDevicesInfo( const bool playback, const int32_t enumDeviceNo, char* enumDeviceName, - const int32_t ednLen) const + const int32_t ednLen, + char* enumDeviceId, + const int32_t ediLen) const { // Device enumeration based on libjingle implementation @@ -1868,6 +1881,8 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo( function == FUNC_GET_DEVICE_NAME_FOR_AN_ENUM) && enumDeviceNo == 0) { strcpy(enumDeviceName, "default"); + if (enumDeviceId) + memset(enumDeviceId, 0, ediLen); err = LATE(snd_device_name_free_hint)(hints); if (err != 0) @@ -1930,6 +1945,11 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo( // We have found the enum device, copy the name to buffer. strncpy(enumDeviceName, desc, ednLen); enumDeviceName[ednLen-1] = '\0'; + if (enumDeviceId) + { + strncpy(enumDeviceId, name, ediLen); + enumDeviceId[ediLen-1] = '\0'; + } keepSearching = false; // Replace '\n' with '-'. char * pret = strchr(enumDeviceName, '\n'/*0xa*/); //LF @@ -1942,6 +1962,11 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo( // We have found the enum device, copy the name to buffer. strncpy(enumDeviceName, name, ednLen); enumDeviceName[ednLen-1] = '\0'; + if (enumDeviceId) + { + strncpy(enumDeviceId, name, ediLen); + enumDeviceId[ediLen-1] = '\0'; + } keepSearching = false; } @@ -1966,7 +1991,7 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo( LATE(snd_strerror)(err)); // Continue and return true anyway, since we did get the whole list. } - } + } if (FUNC_GET_NUM_OF_DEVICE == function) { @@ -2251,6 +2276,11 @@ bool AudioDeviceLinuxALSA::RecThreadProcess() { // buf is full _recordingFramesLeft = _recordingFramesIn10MS; + if (_firstRecord) { + LOG_FIRST_CAPTURE(this); + _firstRecord = false; + } + LOG_CAPTURE_FRAMES(this, _recordingFramesIn10MS); // store the recorded buffer (no action will be taken if the // #recorded samples is not a full buffer) _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer, diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h index 35abc152fc8..fdfb2094d94 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h @@ -167,7 +167,9 @@ private: const bool playback, const int32_t enumDeviceNo = 0, char* enumDeviceName = NULL, - const int32_t ednLen = 0) const; + const int32_t ednLen = 0, + char* enumDeviceID = NULL, + const int32_t ediLen = 0) const; int32_t ErrorRecovery(int32_t error, snd_pcm_t* deviceHandle); private: @@ -233,6 +235,7 @@ private: private: bool _initialized; bool _recording; + bool _firstRecord; bool _playing; bool _recIsInitialized; bool _playIsInitialized; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc b/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc index d6a216216ce..079713ef4f2 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc @@ -27,7 +27,7 @@ #include "webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h" -#ifdef WEBRTC_LINUX +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) #include #endif @@ -37,8 +37,8 @@ using namespace webrtc; namespace webrtc_adm_linux { inline static const char *GetDllError() { -#ifdef WEBRTC_LINUX - char *err = dlerror(); +#if defined(WEBRTC_LINUX) || 
defined(WEBRTC_BSD) + const char *err = dlerror(); if (err) { return err; } else { @@ -50,7 +50,7 @@ inline static const char *GetDllError() { } DllHandle InternalLoadDll(const char dll_name[]) { -#ifdef WEBRTC_LINUX +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) DllHandle handle = dlopen(dll_name, RTLD_NOW); #else #error Not implemented @@ -63,7 +63,7 @@ DllHandle InternalLoadDll(const char dll_name[]) { } void InternalUnloadDll(DllHandle handle) { -#ifdef WEBRTC_LINUX +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) if (dlclose(handle) != 0) { WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, "%s", GetDllError()); @@ -76,9 +76,9 @@ void InternalUnloadDll(DllHandle handle) { static bool LoadSymbol(DllHandle handle, const char *symbol_name, void **symbol) { -#ifdef WEBRTC_LINUX +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) *symbol = dlsym(handle, symbol_name); - char *err = dlerror(); + const char *err = dlerror(); if (err) { WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1, "Error loading symbol %s : %d", symbol_name, err); @@ -101,7 +101,7 @@ bool InternalLoadSymbols(DllHandle handle, int num_symbols, const char *const symbol_names[], void *symbols[]) { -#ifdef WEBRTC_LINUX +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) // Clear any old errors. dlerror(); #endif diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h b/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h index b5186fa7bf2..28116634877 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h @@ -42,7 +42,7 @@ namespace webrtc_adm_linux { -#ifdef WEBRTC_LINUX +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) typedef void *DllHandle; const DllHandle kInvalidDllHandle = NULL; diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc b/media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc index 1f1890a6d96..1fe2cf7511f 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc @@ -29,7 +29,11 @@ namespace webrtc_adm_linux_pulse { +#if defined(__OpenBSD__) || defined(WEBRTC_GONK) +LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so") +#else LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so.0") +#endif #define X(sym) \ LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(PulseAudioSymbolTable, sym) PULSE_AUDIO_SYMBOLS_LIST diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_device_opensles.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_device_opensles.cc new file mode 100644 index 00000000000..f7ce95b17d0 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_device_opensles.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "../android/audio_device_opensles_android.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_device_opensles.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_device_opensles.h new file mode 100644 index 00000000000..480149e86de --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_device_opensles.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/audio_device_opensles_android.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.cc new file mode 100644 index 00000000000..10168c2a117 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/audio_manager_jni.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.h new file mode 100644 index 00000000000..961fa23cb04 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/audio_manager_jni.h @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/audio_manager_jni.h" + diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/fine_audio_buffer.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/fine_audio_buffer.cc new file mode 100644 index 00000000000..31351f4e4ca --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/fine_audio_buffer.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/fine_audio_buffer.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/fine_audio_buffer.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/fine_audio_buffer.h new file mode 100644 index 00000000000..2512b71ee34 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/fine_audio_buffer.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/fine_audio_buffer.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/low_latency_event_posix.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/low_latency_event_posix.cc new file mode 100644 index 00000000000..b5295665216 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/low_latency_event_posix.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "../android/low_latency_event_posix.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/low_latency_event_posix.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/low_latency_event_posix.h new file mode 100644 index 00000000000..21e4e87ed03 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/low_latency_event_posix.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/low_latency_event_posix.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_common.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_common.cc new file mode 100644 index 00000000000..ea7a4f92be1 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_common.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/opensles_common.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_common.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_common.h new file mode 100644 index 00000000000..33b8e5c43f1 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_common.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/opensles_common.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_input.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_input.cc new file mode 100644 index 00000000000..48ae8813aae --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_input.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/opensles_input.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_input.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_input.h new file mode 100644 index 00000000000..f42cdea837e --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_input.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/opensles_input.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_output.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_output.cc new file mode 100644 index 00000000000..270feb7b533 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_output.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "../android/opensles_output.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_output.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_output.h new file mode 100644 index 00000000000..d8c426eed2b --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/opensles_output.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/opensles_output.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.cc b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.cc new file mode 100644 index 00000000000..c3927ff0c92 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/single_rw_fifo.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.h b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.h new file mode 100644 index 00000000000..378be4dad03 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/opensl/single_rw_fifo.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/single_rw_fifo.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.cc b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.cc new file mode 100644 index 00000000000..c1f56f199d6 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/audio_device_jni_android.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.h b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.h new file mode 100644 index 00000000000..88184b22176 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_jni_android.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/audio_device_jni_android.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_utility_shared.cc b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_utility_shared.cc new file mode 100644 index 00000000000..ea40bbfe6cb --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_utility_shared.cc @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "../android/audio_device_utility_android.cc" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_utility_shared.h b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_utility_shared.h new file mode 100644 index 00000000000..38eeebee7e9 --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_device/shared/audio_device_utility_shared.h @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "../android/audio_device_utility_android.h" diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc b/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc index fb25cddeed3..f1dc86a1b0e 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/test/audio_device_test_api.cc @@ -210,7 +210,7 @@ class AudioDeviceAPITest: public testing::Test { // Create default implementation instance EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create( kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL); -#elif defined(WEBRTC_LINUX) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create( kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL); EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create( @@ -1703,7 +1703,7 @@ TEST_F(AudioDeviceAPITest, CPULoad) { // TODO(kjellander): Fix flakiness causing failures on Windows. // TODO(phoglund): Fix flakiness causing failures on Linux. -#if !defined(_WIN32) && !defined(WEBRTC_LINUX) +#if !defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_BSD) TEST_F(AudioDeviceAPITest, StartAndStopRawOutputFileRecording) { // NOTE: this API is better tested in a functional test CheckInitialPlayoutStates(); @@ -1772,7 +1772,7 @@ TEST_F(AudioDeviceAPITest, StartAndStopRawInputFileRecording) { // - size of raw_input_not_recording.pcm shall be 0 // - size of raw_input_not_recording.pcm shall be > 0 } -#endif // !WIN32 && !WEBRTC_LINUX +#endif // !WIN32 && !WEBRTC_LINUX && !defined(WEBRTC_BSD) TEST_F(AudioDeviceAPITest, RecordingSampleRate) { uint32_t sampleRate(0); @@ -1783,10 +1783,10 @@ TEST_F(AudioDeviceAPITest, RecordingSampleRate) { EXPECT_EQ(48000, sampleRate); #elif defined(ANDROID) TEST_LOG("Recording sample rate is %u\n\n", sampleRate); - EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000)); + EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000)); #elif defined(WEBRTC_IOS) TEST_LOG("Recording sample rate is %u\n\n", sampleRate); - EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000) || + EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000) || (sampleRate == 8000)); #endif @@ -1802,10 +1802,10 @@ TEST_F(AudioDeviceAPITest, PlayoutSampleRate) { EXPECT_EQ(48000, sampleRate); #elif defined(ANDROID) TEST_LOG("Playout sample rate is %u\n\n", sampleRate); - EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000)); + EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000)); #elif defined(WEBRTC_IOS) TEST_LOG("Playout sample rate is %u\n\n", sampleRate); - EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000) || + EXPECT_TRUE((sampleRate == 44100) || (sampleRate == 16000) || (sampleRate == 8000)); #endif } diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc 
b/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc index 12a4bad4916..8bc462d3c13 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_device/test/func_test_manager.cc @@ -348,12 +348,6 @@ int32_t AudioTransportImpl::NeedMorePlayData( int32_t fsInHz(samplesPerSecIn); int32_t fsOutHz(samplesPerSec); - if (fsInHz == 44100) - fsInHz = 44000; - - if (fsOutHz == 44100) - fsOutHz = 44000; - if (nChannelsIn == 2 && nBytesPerSampleIn == 4) { // input is stereo => we will resample in stereo @@ -1258,7 +1252,7 @@ int32_t FuncTestManager::TestAudioTransport() if (samplesPerSec == 48000) { _audioTransport->SetFilePlayout( true, GetResource(_playoutFile48.c_str())); - } else if (samplesPerSec == 44100 || samplesPerSec == 44000) { + } else if (samplesPerSec == 44100) { _audioTransport->SetFilePlayout( true, GetResource(_playoutFile44.c_str())); } else if (samplesPerSec == 16000) { @@ -1491,7 +1485,7 @@ int32_t FuncTestManager::TestSpeakerVolume() if (48000 == samplesPerSec) { _audioTransport->SetFilePlayout( true, GetResource(_playoutFile48.c_str())); - } else if (44100 == samplesPerSec || samplesPerSec == 44000) { + } else if (44100 == samplesPerSec) { _audioTransport->SetFilePlayout( true, GetResource(_playoutFile44.c_str())); } else if (samplesPerSec == 16000) { @@ -1592,7 +1586,7 @@ int32_t FuncTestManager::TestSpeakerMute() EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&samplesPerSec)); if (48000 == samplesPerSec) _audioTransport->SetFilePlayout(true, _playoutFile48.c_str()); - else if (44100 == samplesPerSec || 44000 == samplesPerSec) + else if (44100 == samplesPerSec) _audioTransport->SetFilePlayout(true, _playoutFile44.c_str()); else { diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c index d194c82694c..40e9f67d0dd 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.c @@ -109,7 +109,17 @@ const float WebRtcAec_overDriveCurve[65] = { // Target suppression levels for nlp modes. // log{0.001, 0.00001, 0.00000001} static const float kTargetSupp[3] = { -6.9f, -11.5f, -18.4f }; -static const float kMinOverDrive[3] = { 1.0f, 2.0f, 5.0f }; + +// Two sets of parameters, one for the extended filter mode. +static const float kExtendedMinOverDrive[3] = { 3.0f, 6.0f, 15.0f }; +static const float kNormalMinOverDrive[3] = { 1.0f, 2.0f, 5.0f }; +static const float kExtendedSmoothingCoefficients[2][2] = + { { 0.9f, 0.1f }, { 0.92f, 0.08f } }; +static const float kNormalSmoothingCoefficients[2][2] = + { { 0.9f, 0.1f }, { 0.93f, 0.07f } }; + +// Number of partitions forming the NLP's "preferred" bands. 
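/* A sketch of how the paired constant sets above are consumed: the runtime
 * choice is a ternary on extended_filter_enabled, the same pattern that
 * ScaleErrorSignal and NonLinearProcessing use further down in this file.
 * (Illustrative only; AecCore fields as declared in aec_core_internal.h.) */
static const float* PickMinOverDrive(const AecCore* aec) {
  /* The extended filter trades adaptation speed for robustness, so it gets
   * the larger overdrive floors. */
  return aec->extended_filter_enabled ? kExtendedMinOverDrive
                                      : kNormalMinOverDrive;
}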
+enum { kPrefBandSize = 24 }; #ifdef WEBRTC_AEC_DEBUG_DUMP extern int webrtc_aec_instance_count; @@ -281,13 +291,13 @@ int WebRtcAec_FreeAec(AecCore* aec) static void FilterFar(AecCore* aec, float yf[2][PART_LEN1]) { int i; - for (i = 0; i < NR_PART; i++) { + for (i = 0; i < aec->num_partitions; i++) { int j; int xPos = (i + aec->xfBufBlockPos) * PART_LEN1; int pos = i * PART_LEN1; // Check for wrap - if (i + aec->xfBufBlockPos >= NR_PART) { - xPos -= NR_PART*(PART_LEN1); + if (i + aec->xfBufBlockPos >= aec->num_partitions) { + xPos -= aec->num_partitions*(PART_LEN1); } for (j = 0; j < PART_LEN1; j++) { @@ -301,22 +311,25 @@ static void FilterFar(AecCore* aec, float yf[2][PART_LEN1]) static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) { + const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu; + const float error_threshold = aec->extended_filter_enabled ? + kExtendedErrorThreshold : aec->normal_error_threshold; int i; - float absEf; + float abs_ef; for (i = 0; i < (PART_LEN1); i++) { ef[0][i] /= (aec->xPow[i] + 1e-10f); ef[1][i] /= (aec->xPow[i] + 1e-10f); - absEf = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]); + abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]); - if (absEf > aec->errThresh) { - absEf = aec->errThresh / (absEf + 1e-10f); - ef[0][i] *= absEf; - ef[1][i] *= absEf; + if (abs_ef > error_threshold) { + abs_ef = error_threshold / (abs_ef + 1e-10f); + ef[0][i] *= abs_ef; + ef[1][i] *= abs_ef; } // Stepsize factor - ef[0][i] *= aec->mu; - ef[1][i] *= aec->mu; + ef[0][i] *= mu; + ef[1][i] *= mu; } } @@ -325,35 +338,35 @@ static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) //static void FilterAdaptationUnconstrained(AecCore* aec, float *fft, // float ef[2][PART_LEN1]) { // int i, j; -// for (i = 0; i < NR_PART; i++) { +// for (i = 0; i < aec->num_partitions; i++) { // int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1); // int pos; // // Check for wrap -// if (i + aec->xfBufBlockPos >= NR_PART) { -// xPos -= NR_PART * PART_LEN1; +// if (i + aec->xfBufBlockPos >= aec->num_partitions) { +// xPos -= aec->num_partitions * PART_LEN1; // } // // pos = i * PART_LEN1; // // for (j = 0; j < PART_LEN1; j++) { -// aec->wfBuf[pos + j][0] += MulRe(aec->xfBuf[xPos + j][0], -// -aec->xfBuf[xPos + j][1], -// ef[j][0], ef[j][1]); -// aec->wfBuf[pos + j][1] += MulIm(aec->xfBuf[xPos + j][0], -// -aec->xfBuf[xPos + j][1], -// ef[j][0], ef[j][1]); +// aec->wfBuf[0][pos + j] += MulRe(aec->xfBuf[0][xPos + j], +// -aec->xfBuf[1][xPos + j], +// ef[0][j], ef[1][j]); +// aec->wfBuf[1][pos + j] += MulIm(aec->xfBuf[0][xPos + j], +// -aec->xfBuf[1][xPos + j], +// ef[0][j], ef[1][j]); // } // } //} static void FilterAdaptation(AecCore* aec, float *fft, float ef[2][PART_LEN1]) { int i, j; - for (i = 0; i < NR_PART; i++) { + for (i = 0; i < aec->num_partitions; i++) { int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1); int pos; // Check for wrap - if (i + aec->xfBufBlockPos >= NR_PART) { - xPos -= NR_PART * PART_LEN1; + if (i + aec->xfBufBlockPos >= aec->num_partitions) { + xPos -= aec->num_partitions * PART_LEN1; } pos = i * PART_LEN1; @@ -427,12 +440,12 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) aec->sampFreq = sampFreq; if (sampFreq == 8000) { - aec->mu = 0.6f; - aec->errThresh = 2e-6f; + aec->normal_mu = 0.6f; + aec->normal_error_threshold = 2e-6f; } else { - aec->mu = 0.5f; - aec->errThresh = 1.5e-6f; + aec->normal_mu = 0.5f; + aec->normal_error_threshold = 1.5e-6f; } if (WebRtc_InitBuffer(aec->nearFrBuf) == -1) { @@ -474,6 +487,9 @@ int 
WebRtcAec_InitAec(AecCore* aec, int sampFreq) aec->delay_logging_enabled = 0; memset(aec->delay_histogram, 0, sizeof(aec->delay_histogram)); + aec->extended_filter_enabled = 0; + aec->num_partitions = kNormalNumPartitions; + // Default target suppression mode. aec->nlp_mode = 1; @@ -483,7 +499,7 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) aec->mult = (short)aec->sampFreq / 16000; } else { - aec->mult = (short)aec->sampFreq / 8000; + aec->mult = (short)aec->sampFreq / 8000; } aec->farBufWritePos = 0; @@ -514,11 +530,14 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) aec->xfBufBlockPos = 0; // TODO: Investigate need for these initializations. Deleting them doesn't // change the output at all and yields 0.4% overall speedup. - memset(aec->xfBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1); - memset(aec->wfBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1); + memset(aec->xfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * + PART_LEN1); + memset(aec->wfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * + PART_LEN1); memset(aec->sde, 0, sizeof(complex_t) * PART_LEN1); memset(aec->sxd, 0, sizeof(complex_t) * PART_LEN1); - memset(aec->xfwBuf, 0, sizeof(complex_t) * NR_PART * PART_LEN1); + memset(aec->xfwBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * + PART_LEN1); memset(aec->se, 0, sizeof(float) * PART_LEN1); // To prevent numerical instability in the first block. @@ -734,13 +753,11 @@ int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std) { } int WebRtcAec_echo_state(AecCore* self) { - assert(self != NULL); return self->echoState; } void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle, Stats* a_nlp) { - assert(self != NULL); assert(erl != NULL); assert(erle != NULL); assert(a_nlp != NULL); @@ -751,14 +768,12 @@ void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle, #ifdef WEBRTC_AEC_DEBUG_DUMP void* WebRtcAec_far_time_buf(AecCore* self) { - assert(self != NULL); return self->far_time_buf; } #endif void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode, int delay_logging) { - assert(self != NULL); assert(nlp_mode >= 0 && nlp_mode < 3); self->nlp_mode = nlp_mode; self->metricsMode = metrics_mode; @@ -771,13 +786,20 @@ void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode, } } +void WebRtcAec_enable_delay_correction(AecCore* self, int enable) { + self->extended_filter_enabled = enable; + self->num_partitions = enable ? kExtendedNumPartitions : kNormalNumPartitions; +} + +int WebRtcAec_delay_correction_enabled(AecCore* self) { + return self->extended_filter_enabled; +} + int WebRtcAec_system_delay(AecCore* self) { - assert(self != NULL); return self->system_delay; } void WebRtcAec_SetSystemDelay(AecCore* self, int delay) { - assert(self != NULL); assert(delay >= 0); self->system_delay = delay; } @@ -853,7 +875,8 @@ static void ProcessBlock(AecCore* aec) { for (i = 0; i < PART_LEN1; i++) { far_spectrum = (xf_ptr[i] * xf_ptr[i]) + (xf_ptr[PART_LEN1 + i] * xf_ptr[PART_LEN1 + i]); - aec->xPow[i] = gPow[0] * aec->xPow[i] + gPow[1] * NR_PART * far_spectrum; + aec->xPow[i] = gPow[0] * aec->xPow[i] + gPow[1] * aec->num_partitions * + far_spectrum; // Calculate absolute spectra abs_far_spectrum[i] = sqrtf(far_spectrum); @@ -913,7 +936,7 @@ static void ProcessBlock(AecCore* aec) { // Update the xfBuf block position. 
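/* Isolating the ring-buffer arithmetic that the block position below feeds.
 * FilterFar and FilterAdaptation both compute a partition's offset this way,
 * now against the runtime aec->num_partitions instead of the compile-time
 * NR_PART (sketch; PART_LEN1 as defined in aec_core.h): */
static int PartitionOffset(int i, int xf_buf_block_pos, int num_partitions) {
  int x_pos = (i + xf_buf_block_pos) * PART_LEN1;
  if (i + xf_buf_block_pos >= num_partitions) {
    x_pos -= num_partitions * PART_LEN1;  /* wrap around the circular buffer */
  }
  return x_pos;
}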
aec->xfBufBlockPos--; if (aec->xfBufBlockPos == -1) { - aec->xfBufBlockPos = NR_PART - 1; + aec->xfBufBlockPos = aec->num_partitions - 1; } // Buffer xf @@ -1014,18 +1037,21 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH) float cohde[PART_LEN1], cohxd[PART_LEN1]; float hNlDeAvg, hNlXdAvg; float hNl[PART_LEN1]; - float hNlPref[PREF_BAND_SIZE]; + float hNlPref[kPrefBandSize]; float hNlFb = 0, hNlFbLow = 0; const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f; - const int prefBandSize = PREF_BAND_SIZE / aec->mult; + const int prefBandSize = kPrefBandSize / aec->mult; const int minPrefBand = 4 / aec->mult; // Near and error power sums float sdSum = 0, seSum = 0; - // Power estimate smoothing coefficients - const float gCoh[2][2] = {{0.9f, 0.1f}, {0.93f, 0.07f}}; - const float *ptrGCoh = gCoh[aec->mult - 1]; + // Power estimate smoothing coefficients. + const float *ptrGCoh = aec->extended_filter_enabled ? + kExtendedSmoothingCoefficients[aec->mult - 1] : + kNormalSmoothingCoefficients[aec->mult - 1]; + const float* min_overdrive = aec->extended_filter_enabled ? + kExtendedMinOverDrive : kNormalMinOverDrive; // Filter energy float wfEnMax = 0, wfEn = 0; @@ -1048,7 +1074,7 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH) if (aec->delayEstCtr == 0) { wfEnMax = 0; aec->delayIdx = 0; - for (i = 0; i < NR_PART; i++) { + for (i = 0; i < aec->num_partitions; i++) { pos = i * PART_LEN1; wfEn = 0; for (j = 0; j < PART_LEN1; j++) { @@ -1189,7 +1215,7 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH) if (aec->hNlXdAvgMin == 1) { aec->echoState = 0; - aec->overDrive = kMinOverDrive[aec->nlp_mode]; + aec->overDrive = min_overdrive[aec->nlp_mode]; if (aec->stNearState == 1) { memcpy(hNl, cohde, sizeof(hNl)); @@ -1245,7 +1271,7 @@ static void NonLinearProcessing(AecCore* aec, short *output, short *outputH) aec->hNlMinCtr = 0; aec->overDrive = WEBRTC_SPL_MAX(kTargetSupp[aec->nlp_mode] / ((float)log(aec->hNlFbMin + 1e-10f) + 1e-10f), - kMinOverDrive[aec->nlp_mode]); + min_overdrive[aec->nlp_mode]); } // Smooth the overdrive. @@ -1465,7 +1491,6 @@ static void InitStats(Stats* stats) { } static void InitMetrics(AecCore* self) { - assert(self != NULL); self->stateCounter = 0; InitLevel(&self->farlevel); InitLevel(&self->nearlevel); @@ -1687,3 +1712,4 @@ static void TimeToFrequency(float time_data[PART_LEN2], freq_data[1][i] = time_data[2 * i + 1]; } } + diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h index 638071735d1..f83c37c8c4b 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core.h @@ -70,23 +70,38 @@ void WebRtcAec_ProcessFrame(AecCore* aec, // Returns the number of elements moved, and adjusts |system_delay| by the // corresponding amount in ms. int WebRtcAec_MoveFarReadPtr(AecCore* aec, int elements); + // Calculates the median and standard deviation among the delay estimates // collected since the last call to this function. int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std); + // Returns the echo state (1: echo, 0: no echo). int WebRtcAec_echo_state(AecCore* self); + // Gets statistics of the echo metrics ERL, ERLE, A_NLP. 
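/* Stepping back to the smoothing-coefficient change in NonLinearProcessing
 * above: each pair is a one-pole smoother over the power spectral estimates,
 * with coeff[0] + coeff[1] == 1, and the extended mode's slower constants
 * smooth harder. A minimal sketch of the update they drive: */
static float SmoothPower(float prev, float instant, const float coeff[2]) {
  return coeff[0] * prev + coeff[1] * instant;
}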
void WebRtcAec_GetEchoStats(AecCore* self, Stats* erl, Stats* erle, Stats* a_nlp); #ifdef WEBRTC_AEC_DEBUG_DUMP void* WebRtcAec_far_time_buf(AecCore* self); #endif + // Sets local configuration modes. void WebRtcAec_SetConfigCore(AecCore* self, int nlp_mode, int metrics_mode, int delay_logging); + +// We now interpret delay correction to mean an extended filter length feature. +// We reuse the delay correction infrastructure to avoid changes through to +// libjingle. See details along with |DelayCorrection| in +// echo_cancellation_impl.h. Non-zero enables, zero disables. +void WebRtcAec_enable_delay_correction(AecCore* self, int enable); + +// Returns non-zero if delay correction is enabled and zero if disabled. +int WebRtcAec_delay_correction_enabled(AecCore* self); + // Returns the current |system_delay|, i.e., the buffered difference between // far-end and near-end. int WebRtcAec_system_delay(AecCore* self); + // Sets the |system_delay| to |value|. Note that if the value is changed // improperly, there can be a performance regression. So it should be used with // care. diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h index 3b92bd609e7..fd0e7847494 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_internal.h @@ -22,6 +22,16 @@ #define NR_PART 12 // Number of partitions in filter. #define PREF_BAND_SIZE 24 +// Number of partitions for the extended filter mode. The first one is an enum +// to be used in array declarations, as it represents the maximum filter length. +enum { kExtendedNumPartitions = 32 }; +static const int kNormalNumPartitions = 12; + +// Extended filter adaptation parameters. +// TODO(ajm): No narrowband tuning yet. +static const float kExtendedMu = 0.4f; +static const float kExtendedErrorThreshold = 1.0e-6f; + typedef struct PowerLevel { float sfrsum; int sfrcounter; @@ -56,11 +66,12 @@ struct AecCore { float dInitMinPow[PART_LEN1]; float *noisePow; - float xfBuf[2][NR_PART * PART_LEN1]; // farend fft buffer - float wfBuf[2][NR_PART * PART_LEN1]; // filter fft + float xfBuf[2][kExtendedNumPartitions * PART_LEN1]; // farend fft buffer + float wfBuf[2][kExtendedNumPartitions * PART_LEN1]; // filter fft complex_t sde[PART_LEN1]; // cross-psd of nearend and error complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend - complex_t xfwBuf[NR_PART * PART_LEN1]; // farend windowed fft buffer + // Farend windowed fft buffer. + complex_t xfwBuf[kExtendedNumPartitions * PART_LEN1]; float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd float hNs[PART_LEN1]; @@ -85,8 +96,8 @@ struct AecCore { int sampFreq; uint32_t seed; - float mu; // stepsize - float errThresh; // error threshold + float normal_mu; // stepsize + float normal_error_threshold; // error threshold int noiseEstCtr; @@ -112,6 +123,11 @@ struct AecCore { void* delay_estimator_farend; void* delay_estimator; + // 1 = extended filter mode enabled, 0 = disabled. + int extended_filter_enabled; + // Runtime selection of number of filter partitions. 
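/* The arithmetic behind the "48 to 128 ms" filter lengths quoted with
 * DelayCorrection in echo_cancellation_impl.h (a sketch, assuming PART_LEN of
 * 64 samples and the 16 kHz rate the AEC runs at in wideband): */
static int FilterLengthMs(int num_partitions) {
  return num_partitions * 64 * 1000 / 16000;
  /* kNormalNumPartitions (12) -> 48 ms; kExtendedNumPartitions (32) -> 128 ms */
}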
+ int num_partitions; + #ifdef WEBRTC_AEC_DEBUG_DUMP RingBuffer* far_time_buf; FILE *farFile; diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c index fdc68723eff..61602a82342 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/aec_core_sse2.c @@ -34,13 +34,14 @@ __inline static float MulIm(float aRe, float aIm, float bRe, float bIm) static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1]) { int i; - for (i = 0; i < NR_PART; i++) { + const int num_partitions = aec->num_partitions; + for (i = 0; i < num_partitions; i++) { int j; int xPos = (i + aec->xfBufBlockPos) * PART_LEN1; int pos = i * PART_LEN1; // Check for wrap - if (i + aec->xfBufBlockPos >= NR_PART) { - xPos -= NR_PART*(PART_LEN1); + if (i + aec->xfBufBlockPos >= num_partitions) { + xPos -= num_partitions*(PART_LEN1); } // vectorized code (four at once) @@ -75,8 +76,11 @@ static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1]) static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) { const __m128 k1e_10f = _mm_set1_ps(1e-10f); - const __m128 kThresh = _mm_set1_ps(aec->errThresh); - const __m128 kMu = _mm_set1_ps(aec->mu); + const __m128 kMu = aec->extended_filter_enabled ? + _mm_set1_ps(kExtendedMu) : _mm_set1_ps(aec->normal_mu); + const __m128 kThresh = aec->extended_filter_enabled ? + _mm_set1_ps(kExtendedErrorThreshold) : + _mm_set1_ps(aec->normal_error_threshold); int i; // vectorized code (four at once) @@ -110,32 +114,39 @@ static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) _mm_storeu_ps(&ef[1][i], ef_im); } // scalar code for the remaining items. - for (; i < (PART_LEN1); i++) { - float absEf; - ef[0][i] /= (aec->xPow[i] + 1e-10f); - ef[1][i] /= (aec->xPow[i] + 1e-10f); - absEf = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]); + { + const float mu = aec->extended_filter_enabled ? + kExtendedMu : aec->normal_mu; + const float error_threshold = aec->extended_filter_enabled ? + kExtendedErrorThreshold : aec->normal_error_threshold; + for (; i < (PART_LEN1); i++) { + float abs_ef; + ef[0][i] /= (aec->xPow[i] + 1e-10f); + ef[1][i] /= (aec->xPow[i] + 1e-10f); + abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]); - if (absEf > aec->errThresh) { - absEf = aec->errThresh / (absEf + 1e-10f); - ef[0][i] *= absEf; - ef[1][i] *= absEf; + if (abs_ef > error_threshold) { + abs_ef = error_threshold / (abs_ef + 1e-10f); + ef[0][i] *= abs_ef; + ef[1][i] *= abs_ef; + } + + // Stepsize factor + ef[0][i] *= mu; + ef[1][i] *= mu; } - - // Stepsize factor - ef[0][i] *= aec->mu; - ef[1][i] *= aec->mu; } } static void FilterAdaptationSSE2(AecCore* aec, float *fft, float ef[2][PART_LEN1]) { int i, j; - for (i = 0; i < NR_PART; i++) { + const int num_partitions = aec->num_partitions; + for (i = 0; i < num_partitions; i++) { int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1); int pos = i * PART_LEN1; // Check for wrap - if (i + aec->xfBufBlockPos >= NR_PART) { - xPos -= NR_PART * PART_LEN1; + if (i + aec->xfBufBlockPos >= num_partitions) { + xPos -= num_partitions * PART_LEN1; } // Process the whole array... 
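/* The restructuring above keeps the usual SIMD shape: a four-wide vector
 * body plus a scalar tail. Since PART_LEN1 is 65, exactly one bin falls to
 * the remainder loop. A shape-only sketch without intrinsics: */
static void ScaleByMu(float* x, int n, float mu) {
  int i = 0;
  for (; i + 3 < n; i += 4) {
    /* vector body: _mm_loadu_ps / _mm_mul_ps / _mm_storeu_ps in the real code */
    x[i] *= mu;
    x[i + 1] *= mu;
    x[i + 2] *= mu;
    x[i + 3] *= mu;
  }
  for (; i < n; ++i) {  /* scalar remainder: one element when n == 65 */
    x[i] *= mu;
  }
}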
@@ -413,3 +424,4 @@ void WebRtcAec_InitAec_SSE2(void) { WebRtcAec_FilterAdaptation = FilterAdaptationSSE2; WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressSSE2; } + diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c index 2d4135982c0..27da67de515 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation.c @@ -27,6 +27,61 @@ #include "webrtc/modules/audio_processing/utility/ring_buffer.h" #include "webrtc/typedefs.h" +// Measured delays [ms] +// Device Chrome GTP +// MacBook Air 10 +// MacBook Retina 10 100 +// MacPro 30? +// +// Win7 Desktop 70 80? +// Win7 T430s 110 +// Win8 T420s 70 +// +// Daisy 50 +// Pixel (w/ preproc?) 240 +// Pixel (w/o preproc?) 110 110 + +// The extended filter mode gives us the flexibility to ignore the system's +// reported delays. We do this for platforms which we believe provide results +// which are incompatible with the AEC's expectations. Based on measurements +// (some provided above) we set a conservative (i.e. lower than measured) +// fixed delay. +// +// WEBRTC_UNTRUSTED_DELAY will only have an impact when |extended_filter_mode| +// is enabled. See the note along with |DelayCorrection| in +// echo_cancellation_impl.h for more details on the mode. +// +// Justification: +// Chromium/Mac: Here, the true latency is so low (~10-20 ms), that it plays +// havoc with the AEC's buffering. To avoid this, we set a fixed delay of 20 ms +// and then compensate by rewinding by 10 ms (in wideband) through +// kDelayDiffOffsetSamples. This trick does not seem to work for larger rewind +// values, but fortunately this is sufficient. +// +// Chromium/Linux(ChromeOS): The values we get on this platform don't correspond +// well to reality. The variance doesn't match the AEC's buffer changes, and the +// bulk values tend to be too low. However, the range across different hardware +// appears to be too large to choose a single value. +// +// GTP/Linux(ChromeOS): TBD, but for the moment we will trust the values. +#if defined(WEBRTC_CHROMIUM_BUILD) && defined(WEBRTC_MAC) +#define WEBRTC_UNTRUSTED_DELAY +#endif + +#if defined(WEBRTC_MAC) +static const int kFixedDelayMs = 20; +static const int kDelayDiffOffsetSamples = -160; +#elif defined(WEBRTC_WIN) +static const int kFixedDelayMs = 50; +static const int kDelayDiffOffsetSamples = 0; +#else +// Essentially ChromeOS. +static const int kFixedDelayMs = 50; +static const int kDelayDiffOffsetSamples = 0; +#endif +static const int kMinTrustedDelayMs = 20; +static const int kMaxTrustedDelayMs = 500; + // Maximum length of resampled signal. 
Must be an integer multiple of frames // (ceil(1/(1 + MIN_SKEW)*2) + 1)*FRAME_LEN // The factor of 2 handles wb, and the + 1 is as a safety margin @@ -43,7 +98,14 @@ int webrtc_aec_instance_count = 0; // Estimates delay to set the position of the far-end buffer read pointer // (controlled by knownDelay) -static int EstBufDelay(aecpc_t *aecInst); +static void EstBufDelayNormal(aecpc_t *aecInst); +static void EstBufDelayExtended(aecpc_t *aecInst); +static int ProcessNormal(aecpc_t* self, const int16_t* near, + const int16_t* near_high, int16_t* out, int16_t* out_high, + int16_t num_samples, int16_t reported_delay_ms, int32_t skew); +static void ProcessExtended(aecpc_t* self, const int16_t* near, + const int16_t* near_high, int16_t* out, int16_t* out_high, + int16_t num_samples, int16_t reported_delay_ms, int32_t skew); int32_t WebRtcAec_Create(void **aecInst) { @@ -135,10 +197,6 @@ int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq) aecpc_t *aecpc = aecInst; AecConfig aecConfig; - if (aecpc == NULL) { - return -1; - } - if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000) { aecpc->lastError = AEC_BAD_PARAMETER_ERROR; return -1; @@ -177,31 +235,31 @@ int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq) aecpc->splitSampFreq = sampFreq; } - aecpc->skewFrCtr = 0; - aecpc->activity = 0; - aecpc->delayCtr = 0; + aecpc->sampFactor = (aecpc->scSampFreq * 1.0f) / aecpc->splitSampFreq; + // Sampling frequency multiplier (SWB is processed as 160 frame size). + aecpc->rate_factor = aecpc->splitSampFreq / 8000; aecpc->sum = 0; aecpc->counter = 0; aecpc->checkBuffSize = 1; aecpc->firstVal = 0; - aecpc->ECstartup = 1; + aecpc->startup_phase = 1; aecpc->bufSizeStart = 0; aecpc->checkBufSizeCtr = 0; - aecpc->filtDelay = 0; + aecpc->msInSndCardBuf = 0; + aecpc->filtDelay = -1; // -1 indicates an initialized state. aecpc->timeForDelayChange = 0; aecpc->knownDelay = 0; aecpc->lastDelayDiff = 0; - aecpc->skew = 0; + aecpc->skewFrCtr = 0; aecpc->resample = kAecFalse; aecpc->highSkewCtr = 0; - aecpc->sampFactor = (aecpc->scSampFreq * 1.0f) / aecpc->splitSampFreq; + aecpc->skew = 0; - // Sampling frequency multiplier (SWB is processed as 160 frame size). - aecpc->rate_factor = aecpc->splitSampFreq / 8000; + aecpc->farend_started = 0; // Default settings. 
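/* Before the default settings below, a note on the delay constants defined at
 * the top of this file: ProcessExtended's trusted-delay branch applies them
 * as a clamp-or-fallback, condensed here as a sketch: */
static short SanitizeReportedDelay(short reported_delay_ms) {
  if (reported_delay_ms < kMinTrustedDelayMs)
    return kMinTrustedDelayMs;  /* keeps the read pointer from jumping around */
  if (reported_delay_ms >= kMaxTrustedDelayMs)
    return kFixedDelayMs;       /* bogus report: recover with the fixed delay */
  return reported_delay_ms;
}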
aecConfig.nlpMode = kAecNlpModerate; @@ -239,10 +297,6 @@ int32_t WebRtcAec_BufferFarend(void *aecInst, const int16_t *farend, float skew; int i = 0; - if (aecpc == NULL) { - return -1; - } - if (farend == NULL) { aecpc->lastError = AEC_NULL_POINTER_ERROR; return -1; @@ -268,6 +322,7 @@ int32_t WebRtcAec_BufferFarend(void *aecInst, const int16_t *farend, farend_ptr = (const int16_t*) newFarend; } + aecpc->farend_started = 1; WebRtcAec_SetSystemDelay(aecpc->aec, WebRtcAec_system_delay(aecpc->aec) + newNrOfSamples); @@ -311,17 +366,6 @@ int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend, { aecpc_t *aecpc = aecInst; int32_t retVal = 0; - short i; - short nBlocks10ms; - short nFrames; - // Limit resampling to doubling/halving of signal - const float minSkewEst = -0.5f; - const float maxSkewEst = 1.0f; - - if (aecpc == NULL) { - return -1; - } - if (nearend == NULL) { aecpc->lastError = AEC_NULL_POINTER_ERROR; return -1; @@ -354,144 +398,21 @@ int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend, aecpc->lastError = AEC_BAD_PARAMETER_WARNING; retVal = -1; } - else if (msInSndCardBuf > 500) { - msInSndCardBuf = 500; + else if (msInSndCardBuf > kMaxTrustedDelayMs) { + // The clamping is now done in ProcessExtended/Normal(). aecpc->lastError = AEC_BAD_PARAMETER_WARNING; retVal = -1; } - // TODO(andrew): we need to investigate if this +10 is really wanted. - msInSndCardBuf += 10; - aecpc->msInSndCardBuf = msInSndCardBuf; - if (aecpc->skewMode == kAecTrue) { - if (aecpc->skewFrCtr < 25) { - aecpc->skewFrCtr++; - } - else { - retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew); - if (retVal == -1) { - aecpc->skew = 0; - aecpc->lastError = AEC_BAD_PARAMETER_WARNING; - } - - aecpc->skew /= aecpc->sampFactor*nrOfSamples; - - if (aecpc->skew < 1.0e-3 && aecpc->skew > -1.0e-3) { - aecpc->resample = kAecFalse; - } - else { - aecpc->resample = kAecTrue; - } - - if (aecpc->skew < minSkewEst) { - aecpc->skew = minSkewEst; - } - else if (aecpc->skew > maxSkewEst) { - aecpc->skew = maxSkewEst; - } - -#ifdef WEBRTC_AEC_DEBUG_DUMP - (void)fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile); -#endif - } - } - - nFrames = nrOfSamples / FRAME_LEN; - nBlocks10ms = nFrames / aecpc->rate_factor; - - if (aecpc->ECstartup) { - if (nearend != out) { - // Only needed if they don't already point to the same place. - memcpy(out, nearend, sizeof(short) * nrOfSamples); - } - - // The AEC is in the start up mode - // AEC is disabled until the system delay is OK - - // Mechanism to ensure that the system delay is reasonably stable. - if (aecpc->checkBuffSize) { - aecpc->checkBufSizeCtr++; - // Before we fill up the far-end buffer we require the system delay - // to be stable (+/-8 ms) compared to the first value. This - // comparison is made during the following 6 consecutive 10 ms - // blocks. If it seems to be stable then we start to fill up the - // far-end buffer. - if (aecpc->counter == 0) { - aecpc->firstVal = aecpc->msInSndCardBuf; - aecpc->sum = 0; - } - - if (abs(aecpc->firstVal - aecpc->msInSndCardBuf) < - WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) { - aecpc->sum += aecpc->msInSndCardBuf; - aecpc->counter++; - } - else { - aecpc->counter = 0; - } - - if (aecpc->counter * nBlocks10ms >= 6) { - // The far-end buffer size is determined in partitions of - // PART_LEN samples. Use 75% of the average value of the system - // delay as buffer size to start with. 
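/* Unpacking the sizing expression that follows: sum/counter is the average
 * reported delay in ms, 8 * rate_factor converts ms to samples, 3/4 takes
 * 75%, and PART_LEN (64) converts samples to partitions. An equivalent
 * sketch (integer rounding differs slightly from the fused expression): */
static int StartBufSizePartitions(int sum_ms, int counter, int rate_factor) {
  int avg_delay_samples = (sum_ms / counter) * 8 * rate_factor;
  return (3 * avg_delay_samples) / (4 * PART_LEN);  /* capped at kMaxBufSizeStart */
}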
- aecpc->bufSizeStart = WEBRTC_SPL_MIN((3 * aecpc->sum * - aecpc->rate_factor * 8) / (4 * aecpc->counter * PART_LEN), - kMaxBufSizeStart); - // Buffer size has now been determined. - aecpc->checkBuffSize = 0; - } - - if (aecpc->checkBufSizeCtr * nBlocks10ms > 50) { - // For really bad systems, don't disable the echo canceller for - // more than 0.5 sec. - aecpc->bufSizeStart = WEBRTC_SPL_MIN((aecpc->msInSndCardBuf * - aecpc->rate_factor * 3) / 40, kMaxBufSizeStart); - aecpc->checkBuffSize = 0; - } - } - - // If |checkBuffSize| changed in the if-statement above. - if (!aecpc->checkBuffSize) { - // The system delay is now reasonably stable (or has been unstable - // for too long). When the far-end buffer is filled with - // approximately the same amount of data as reported by the system - // we end the startup phase. - int overhead_elements = - WebRtcAec_system_delay(aecpc->aec) / PART_LEN - - aecpc->bufSizeStart; - if (overhead_elements == 0) { - // Enable the AEC - aecpc->ECstartup = 0; - } else if (overhead_elements > 0) { - // TODO(bjornv): Do we need a check on how much we actually - // moved the read pointer? It should always be possible to move - // the pointer |overhead_elements| since we have only added data - // to the buffer and no delay compensation nor AEC processing - // has been done. - WebRtcAec_MoveFarReadPtr(aecpc->aec, overhead_elements); - - // Enable the AEC - aecpc->ECstartup = 0; - } - } + // This returns the value of aec->extended_filter_enabled. + if (WebRtcAec_delay_correction_enabled(aecpc->aec)) { + ProcessExtended(aecpc, nearend, nearendH, out, outH, nrOfSamples, + msInSndCardBuf, skew); } else { - // AEC is enabled. - - EstBufDelay(aecpc); - - // Note that 1 frame is supported for NB and 2 frames for WB. - for (i = 0; i < nFrames; i++) { - // Call the AEC. - WebRtcAec_ProcessFrame(aecpc->aec, - &nearend[FRAME_LEN * i], - &nearendH[FRAME_LEN * i], - aecpc->knownDelay, - &out[FRAME_LEN * i], - &outH[FRAME_LEN * i]); - // TODO(bjornv): Re-structure such that we don't have to pass - // |aecpc->knownDelay| as input. Change name to something like - // |system_buffer_diff|. 
- } + if (ProcessNormal(aecpc, nearend, nearendH, out, outH, nrOfSamples, + msInSndCardBuf, skew) != 0) { + retVal = -1; + } } #ifdef WEBRTC_AEC_DEBUG_DUMP @@ -509,11 +430,6 @@ int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend, int WebRtcAec_set_config(void* handle, AecConfig config) { aecpc_t* self = (aecpc_t*)handle; - - if (handle == NULL ) { - return -1; - } - if (self->initFlag != initCheck) { self->lastError = AEC_UNINITIALIZED_ERROR; return -1; @@ -548,10 +464,6 @@ int WebRtcAec_set_config(void* handle, AecConfig config) { int WebRtcAec_get_echo_status(void* handle, int* status) { aecpc_t* self = (aecpc_t*)handle; - - if (handle == NULL ) { - return -1; - } if (status == NULL ) { self->lastError = AEC_NULL_POINTER_ERROR; return -1; @@ -665,10 +577,6 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) { int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) { aecpc_t* self = handle; - - if (handle == NULL) { - return -1; - } if (median == NULL) { self->lastError = AEC_NULL_POINTER_ERROR; return -1; @@ -693,11 +601,6 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) { int32_t WebRtcAec_get_error_code(void *aecInst) { aecpc_t *aecpc = aecInst; - - if (aecpc == NULL) { - return -1; - } - return aecpc->lastError; } @@ -708,7 +611,225 @@ AecCore* WebRtcAec_aec_core(void* handle) { return ((aecpc_t*) handle)->aec; } -static int EstBufDelay(aecpc_t* aecpc) { +static int ProcessNormal(aecpc_t *aecpc, const int16_t *nearend, + const int16_t *nearendH, int16_t *out, int16_t *outH, + int16_t nrOfSamples, int16_t msInSndCardBuf, + int32_t skew) { + int retVal = 0; + short i; + short nBlocks10ms; + short nFrames; + // Limit resampling to doubling/halving of signal + const float minSkewEst = -0.5f; + const float maxSkewEst = 1.0f; + + msInSndCardBuf = msInSndCardBuf > kMaxTrustedDelayMs ? + kMaxTrustedDelayMs : msInSndCardBuf; + // TODO(andrew): we need to investigate if this +10 is really wanted. + msInSndCardBuf += 10; + aecpc->msInSndCardBuf = msInSndCardBuf; + + if (aecpc->skewMode == kAecTrue) { + if (aecpc->skewFrCtr < 25) { + aecpc->skewFrCtr++; + } + else { + retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew); + if (retVal == -1) { + aecpc->skew = 0; + aecpc->lastError = AEC_BAD_PARAMETER_WARNING; + } + + aecpc->skew /= aecpc->sampFactor*nrOfSamples; + + if (aecpc->skew < 1.0e-3 && aecpc->skew > -1.0e-3) { + aecpc->resample = kAecFalse; + } + else { + aecpc->resample = kAecTrue; + } + + if (aecpc->skew < minSkewEst) { + aecpc->skew = minSkewEst; + } + else if (aecpc->skew > maxSkewEst) { + aecpc->skew = maxSkewEst; + } + +#ifdef WEBRTC_AEC_DEBUG_DUMP + (void)fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile); +#endif + } + } + + nFrames = nrOfSamples / FRAME_LEN; + nBlocks10ms = nFrames / aecpc->rate_factor; + + if (aecpc->startup_phase) { + // Only needed if they don't already point to the same place. + if (nearend != out) { + memcpy(out, nearend, sizeof(short) * nrOfSamples); + } + if (nearendH != outH) { + memcpy(outH, nearendH, sizeof(short) * nrOfSamples); + } + + // The AEC is in the start up mode + // AEC is disabled until the system delay is OK + + // Mechanism to ensure that the system delay is reasonably stable. + if (aecpc->checkBuffSize) { + aecpc->checkBufSizeCtr++; + // Before we fill up the far-end buffer we require the system delay + // to be stable (+/-8 ms) compared to the first value. This + // comparison is made during the following 6 consecutive 10 ms + // blocks. 
If it seems to be stable then we start to fill up the + // far-end buffer. + if (aecpc->counter == 0) { + aecpc->firstVal = aecpc->msInSndCardBuf; + aecpc->sum = 0; + } + + if (abs(aecpc->firstVal - aecpc->msInSndCardBuf) < + WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) { + aecpc->sum += aecpc->msInSndCardBuf; + aecpc->counter++; + } + else { + aecpc->counter = 0; + } + + if (aecpc->counter * nBlocks10ms >= 6) { + // The far-end buffer size is determined in partitions of + // PART_LEN samples. Use 75% of the average value of the system + // delay as buffer size to start with. + aecpc->bufSizeStart = WEBRTC_SPL_MIN((3 * aecpc->sum * + aecpc->rate_factor * 8) / (4 * aecpc->counter * PART_LEN), + kMaxBufSizeStart); + // Buffer size has now been determined. + aecpc->checkBuffSize = 0; + } + + if (aecpc->checkBufSizeCtr * nBlocks10ms > 50) { + // For really bad systems, don't disable the echo canceller for + // more than 0.5 sec. + aecpc->bufSizeStart = WEBRTC_SPL_MIN((aecpc->msInSndCardBuf * + aecpc->rate_factor * 3) / 40, kMaxBufSizeStart); + aecpc->checkBuffSize = 0; + } + } + + // If |checkBuffSize| changed in the if-statement above. + if (!aecpc->checkBuffSize) { + // The system delay is now reasonably stable (or has been unstable + // for too long). When the far-end buffer is filled with + // approximately the same amount of data as reported by the system + // we end the startup phase. + int overhead_elements = + WebRtcAec_system_delay(aecpc->aec) / PART_LEN - aecpc->bufSizeStart; + if (overhead_elements == 0) { + // Enable the AEC + aecpc->startup_phase = 0; + } else if (overhead_elements > 0) { + // TODO(bjornv): Do we need a check on how much we actually + // moved the read pointer? It should always be possible to move + // the pointer |overhead_elements| since we have only added data + // to the buffer and no delay compensation nor AEC processing + // has been done. + WebRtcAec_MoveFarReadPtr(aecpc->aec, overhead_elements); + + // Enable the AEC + aecpc->startup_phase = 0; + } + } + } else { + // AEC is enabled. + EstBufDelayNormal(aecpc); + + // Note that 1 frame is supported for NB and 2 frames for WB. + for (i = 0; i < nFrames; i++) { + // Call the AEC. + WebRtcAec_ProcessFrame(aecpc->aec, + &nearend[FRAME_LEN * i], + &nearendH[FRAME_LEN * i], + aecpc->knownDelay, + &out[FRAME_LEN * i], + &outH[FRAME_LEN * i]); + // TODO(bjornv): Re-structure such that we don't have to pass + // |aecpc->knownDelay| as input. Change name to something like + // |system_buffer_diff|. + } + } + + return retVal; +} + +static void ProcessExtended(aecpc_t* self, const int16_t* near, + const int16_t* near_high, int16_t* out, int16_t* out_high, + int16_t num_samples, int16_t reported_delay_ms, int32_t skew) { + int i; + const int num_frames = num_samples / FRAME_LEN; +#if defined(WEBRTC_UNTRUSTED_DELAY) + const int delay_diff_offset = kDelayDiffOffsetSamples; + reported_delay_ms = kFixedDelayMs; +#else + // This is the usual mode where we trust the reported system delay values. + const int delay_diff_offset = 0; + // Due to the longer filter, we no longer add 10 ms to the reported delay + // to reduce chance of non-causality. Instead we apply a minimum here to avoid + // issues with the read pointer jumping around needlessly. + reported_delay_ms = reported_delay_ms < kMinTrustedDelayMs ? + kMinTrustedDelayMs : reported_delay_ms; + // If the reported delay appears to be bogus, we attempt to recover by using + // the measured fixed delay values. 
We use >= here because higher layers + // may already clamp to this maximum value, and we would otherwise not + // detect it here. + reported_delay_ms = reported_delay_ms >= kMaxTrustedDelayMs ? + kFixedDelayMs : reported_delay_ms; +#endif + self->msInSndCardBuf = reported_delay_ms; + + if (!self->farend_started) { + // Only needed if they don't already point to the same place. + if (near != out) { + memcpy(out, near, sizeof(short) * num_samples); + } + if (near_high != out_high) { + memcpy(out_high, near_high, sizeof(short) * num_samples); + } + return; + } + if (self->startup_phase) { + // In the extended mode, there isn't a startup "phase", just a special + // action on the first frame. In the trusted delay case, we'll take the + // current reported delay, unless it's less then our conservative + // measurement. + int startup_size_ms = reported_delay_ms < kFixedDelayMs ? + kFixedDelayMs : reported_delay_ms; + int overhead_elements = (WebRtcAec_system_delay(self->aec) - + startup_size_ms / 2 * self->rate_factor * 8) / PART_LEN; + WebRtcAec_MoveFarReadPtr(self->aec, overhead_elements); + self->startup_phase = 0; + } + + EstBufDelayExtended(self); + + { + // |delay_diff_offset| gives us the option to manually rewind the delay on + // very low delay platforms which can't be expressed purely through + // |reported_delay_ms|. + const int adjusted_known_delay = + WEBRTC_SPL_MAX(0, self->knownDelay + delay_diff_offset); + + for (i = 0; i < num_frames; ++i) { + WebRtcAec_ProcessFrame(self->aec, &near[FRAME_LEN * i], + &near_high[FRAME_LEN * i], adjusted_known_delay, + &out[FRAME_LEN * i], &out_high[FRAME_LEN * i]); + } + } +} + +static void EstBufDelayNormal(aecpc_t* aecpc) { int nSampSndCard = aecpc->msInSndCardBuf * sampMsNb * aecpc->rate_factor; int current_delay = nSampSndCard - WebRtcAec_system_delay(aecpc->aec); int delay_difference = 0; @@ -732,8 +853,11 @@ static int EstBufDelay(aecpc_t* aecpc) { current_delay += WebRtcAec_MoveFarReadPtr(aecpc->aec, 1) * PART_LEN; } + // We use -1 to signal an initialized state in the "extended" implementation; + // compensate for that. + aecpc->filtDelay = aecpc->filtDelay < 0 ? 0 : aecpc->filtDelay; aecpc->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.8 * aecpc->filtDelay + - 0.2 * current_delay)); + 0.2 * current_delay)); delay_difference = aecpc->filtDelay - aecpc->knownDelay; if (delay_difference > 224) { @@ -756,6 +880,58 @@ static int EstBufDelay(aecpc_t* aecpc) { if (aecpc->timeForDelayChange > 25) { aecpc->knownDelay = WEBRTC_SPL_MAX((int) aecpc->filtDelay - 160, 0); } - - return 0; +} + +static void EstBufDelayExtended(aecpc_t* self) { + int reported_delay = self->msInSndCardBuf * sampMsNb * self->rate_factor; + int current_delay = reported_delay - WebRtcAec_system_delay(self->aec); + int delay_difference = 0; + + // Before we proceed with the delay estimate filtering we: + // 1) Compensate for the frame that will be read. + // 2) Compensate for drift resampling. + // 3) Compensate for non-causality if needed, since the estimated delay can't + // be negative. + + // 1) Compensating for the frame(s) that will be read/processed. + current_delay += FRAME_LEN * self->rate_factor; + + // 2) Account for resampling frame delay. + if (self->skewMode == kAecTrue && self->resample == kAecTrue) { + current_delay -= kResamplingDelay; + } + + // 3) Compensate for non-causality, if needed, by flushing two blocks. 
+ if (current_delay < PART_LEN) { + current_delay += WebRtcAec_MoveFarReadPtr(self->aec, 2) * PART_LEN; + } + + if (self->filtDelay == -1) { + self->filtDelay = WEBRTC_SPL_MAX(0, 0.5 * current_delay); + } else { + self->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.95 * self->filtDelay + + 0.05 * current_delay)); + } + + delay_difference = self->filtDelay - self->knownDelay; + if (delay_difference > 384) { + if (self->lastDelayDiff < 128) { + self->timeForDelayChange = 0; + } else { + self->timeForDelayChange++; + } + } else if (delay_difference < 128 && self->knownDelay > 0) { + if (self->lastDelayDiff > 384) { + self->timeForDelayChange = 0; + } else { + self->timeForDelayChange++; + } + } else { + self->timeForDelayChange = 0; + } + self->lastDelayDiff = delay_difference; + + if (self->timeForDelayChange > 25) { + self->knownDelay = WEBRTC_SPL_MAX((int) self->filtDelay - 256, 0); + } } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h index 1298901aeb0..e939c4297be 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h @@ -20,8 +20,6 @@ typedef struct { int splitSampFreq; int scSampFreq; float sampFactor; // scSampRate / sampFreq - short autoOnOff; - short activity; short skewMode; int bufSizeStart; int knownDelay; @@ -39,7 +37,7 @@ typedef struct { short msInSndCardBuf; short filtDelay; // Filtered delay estimate. int timeForDelayChange; - int ECstartup; + int startup_phase; int checkBuffSize; short lastDelayDiff; @@ -62,6 +60,8 @@ typedef struct { int lastError; + int farend_started; + AecCore* aec; } aecpc_t; diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc index 97ebea3dfbe..db37f0e83a8 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/aec/system_delay_unittest.cc @@ -128,7 +128,7 @@ void SystemDelayTest::RunStableStartup() { for (; process_time_ms < kStableConvergenceMs; process_time_ms += 10) { RenderAndCapture(kDeviceBufMs); buffer_size += samples_per_frame_; - if (self_->ECstartup == 0) { + if (self_->startup_phase == 0) { // We have left the startup phase. break; } @@ -222,7 +222,7 @@ TEST_F(SystemDelayTest, CorrectDelayAfterUnstableStartup) { RenderAndCapture(reported_delay_ms); buffer_size += samples_per_frame_; buffer_offset_ms = -buffer_offset_ms; - if (self_->ECstartup == 0) { + if (self_->startup_phase == 0) { // We have left the startup phase. break; } @@ -268,7 +268,7 @@ TEST_F(SystemDelayTest, CorrectDelayAfterStableBufferBuildUp) { for (; process_time_ms <= kMaxConvergenceMs; process_time_ms += 10) { RenderAndCapture(kDeviceBufMs); buffer_size += samples_per_frame_; - if (self_->ECstartup == 0) { + if (self_->startup_phase == 0) { // We have left the startup phase. 
break; } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi index 02ba74b3035..357ee495b84 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi @@ -155,6 +155,7 @@ 'aec/aec_rdft_sse2.c', ], 'cflags': ['-msse2',], + 'cflags_mozilla': [ '-msse2', ], 'xcode_settings': { 'OTHER_CFLAGS': ['-msse2',], }, @@ -178,11 +179,14 @@ 'dependencies': [ 'audio_processing_offsets', ], - 'sources': [ + # + # We disable the ASM source, because our gyp->Makefile translator + # does not support the build steps to get the asm offsets. + 'sources!': [ 'aecm/aecm_core_neon.S', 'ns/nsx_core_neon.S', ], - 'sources!': [ + 'sources': [ 'aecm/aecm_core_neon.c', 'ns/nsx_core_neon.c', ], diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc index 47ee802483b..7c36872ef4d 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc @@ -13,12 +13,14 @@ #include <assert.h> #include <string.h> +extern "C" { +#include "webrtc/modules/audio_processing/aec/aec_core.h" +} +#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h" #include "webrtc/modules/audio_processing/audio_buffer.h" #include "webrtc/modules/audio_processing/audio_processing_impl.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" -#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h" - namespace webrtc { typedef void Handle; @@ -69,7 +71,8 @@ EchoCancellationImpl::EchoCancellationImpl(const AudioProcessingImpl* apm) stream_drift_samples_(0), was_stream_drift_set_(false), stream_has_echo_(false), - delay_logging_enabled_(false) {} + delay_logging_enabled_(false), + delay_correction_enabled_(true) {} EchoCancellationImpl::~EchoCancellationImpl() {} @@ -338,6 +341,13 @@ int EchoCancellationImpl::Initialize() { return apm_->kNoError; } +#if 0 +void EchoCancellationImpl::SetExtraOptions(const Config& config) { + delay_correction_enabled_ = config.Get<DelayCorrection>().enabled; + Configure(); +} +#endif + void* EchoCancellationImpl::CreateHandle() const { Handle* handle = NULL; if (WebRtcAec_Create(&handle) != apm_->kNoError) { @@ -369,6 +379,8 @@ int EchoCancellationImpl::ConfigureHandle(void* handle) const { config.skewMode = drift_compensation_enabled_; config.delay_logging = delay_logging_enabled_; + WebRtcAec_enable_delay_correction(WebRtcAec_aec_core( + static_cast<Handle*>(handle)), delay_correction_enabled_ ? 1 : 0); return WebRtcAec_set_config(static_cast<Handle*>(handle), config); } diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h index 07506d4e03d..5d98a0b3531 100644 --- a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl.h @@ -14,6 +14,29 @@ #include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h" namespace webrtc { +// Use to enable the delay correction feature. This now engages an extended +// filter mode in the AEC, along with robustness measures around the reported +// system delays.
It comes with a significant increase in AEC complexity, but is + // much more robust to unreliable reported delays. + // + // Detailed changes to the algorithm: + // - The filter length is changed from 48 to 128 ms. This comes with tuning of + // several parameters: i) filter adaptation stepsize and error threshold; + // ii) non-linear processing smoothing and overdrive. + // - Option to ignore the reported delays on platforms which we deem + // sufficiently unreliable. See WEBRTC_UNTRUSTED_DELAY in echo_cancellation.c. + // - Faster startup times by removing the excessive "startup phase" processing + // of reported delays. + // - Much more conservative adjustments to the far-end read pointer. We smooth + // the delay difference more heavily, and back off from the difference more. + // Adjustments force a readaptation of the filter, so they should be avoided + // except when really necessary. +struct DelayCorrection { + DelayCorrection() : enabled(false) {} + DelayCorrection(bool enabled) : enabled(enabled) {} + + bool enabled; +}; class AudioProcessingImpl; class AudioBuffer; @@ -34,6 +57,7 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper { // ProcessingComponent implementation. virtual int Initialize() OVERRIDE; + // virtual void SetExtraOptions(const Config& config) OVERRIDE; private: // EchoCancellation implementation. @@ -70,6 +94,7 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper { bool was_stream_drift_set_; bool stream_has_echo_; bool delay_logging_enabled_; + bool delay_correction_enabled_; }; } // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc new file mode 100644 index 00000000000..16ecf02e4bd --- /dev/null +++ b/media/webrtc/trunk/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "testing/gtest/include/gtest/gtest.h" +extern "C" { +#include "webrtc/modules/audio_processing/aec/aec_core.h" +} +#include "webrtc/modules/audio_processing/echo_cancellation_impl.h" +#include "webrtc/modules/audio_processing/include/audio_processing.h" +#include "webrtc/system_wrappers/interface/scoped_ptr.h" + +namespace webrtc { + +TEST(EchoCancellationInternalTest, DelayCorrection) { + scoped_ptr<AudioProcessing> ap(AudioProcessing::Create(0)); + EXPECT_TRUE(ap->echo_cancellation()->aec_core() == NULL); + + EXPECT_EQ(ap->kNoError, ap->echo_cancellation()->Enable(true)); + EXPECT_TRUE(ap->echo_cancellation()->is_enabled()); + + AecCore* aec_core = ap->echo_cancellation()->aec_core(); + ASSERT_TRUE(aec_core != NULL); + // Disabled by default. + EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core)); + + Config config; + config.Set<DelayCorrection>(new DelayCorrection(true)); + ap->SetExtraOptions(config); + EXPECT_EQ(1, WebRtcAec_delay_correction_enabled(aec_core)); + + // Retains setting after initialization.
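// An aside on the public surface this test exercises; a standalone sketch of
// the enabling sequence (upstream semantics: in this Mozilla import
// SetExtraOptions is #if 0'd out and delay_correction_enabled_ is simply
// constructed as true):
static void EnableExtendedFilter(AudioProcessing* apm) {
  Config config;
  config.Set<DelayCorrection>(new DelayCorrection(true));
  apm->SetExtraOptions(config);  // forwarded to WebRtcAec_enable_delay_correction()
}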
+ EXPECT_EQ(ap->kNoError, ap->Initialize()); + EXPECT_EQ(1, WebRtcAec_delay_correction_enabled(aec_core)); + + config.Set<DelayCorrection>(new DelayCorrection(false)); + ap->SetExtraOptions(config); + EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core)); + + // Retains setting after initialization. + EXPECT_EQ(ap->kNoError, ap->Initialize()); + EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core)); +} + +} // namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc b/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc index 04022ad62a4..8971954763d 100644 --- a/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc +++ b/media/webrtc/trunk/webrtc/modules/media_file/source/media_file_utility.cc @@ -609,13 +609,13 @@ int32_t ModuleFileUtility::ReadWavHeader(InStream& wav) // special cases? if(_wavFormatObj.nSamplesPerSec == 44100) { - _readSizeBytes = 440 * _wavFormatObj.nChannels * + _readSizeBytes = 441 * _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8); } else if(_wavFormatObj.nSamplesPerSec == 22050) { - _readSizeBytes = 220 * _wavFormatObj.nChannels * + _readSizeBytes = 220 * _wavFormatObj.nChannels * // XXX inexact! (_wavFormatObj.nBitsPerSample / 8); } else if(_wavFormatObj.nSamplesPerSec == 11025) { - _readSizeBytes = 110 * _wavFormatObj.nChannels * + _readSizeBytes = 110 * _wavFormatObj.nChannels * // XXX inexact! (_wavFormatObj.nBitsPerSample / 8); } else { _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) * @@ -677,22 +677,22 @@ int32_t ModuleFileUtility::InitWavCodec(uint32_t samplesPerSec, { strcpy(codec_info_.plname, "L16"); _codecId = kCodecL16_16kHz; - codec_info_.pacsize = 110; - codec_info_.plfreq = 11000; + codec_info_.pacsize = 110; // XXX inexact! + codec_info_.plfreq = 11000; // XXX inexact! } else if(samplesPerSec == 22050) { strcpy(codec_info_.plname, "L16"); _codecId = kCodecL16_16kHz; - codec_info_.pacsize = 220; - codec_info_.plfreq = 22000; + codec_info_.pacsize = 220; // XXX inexact! + codec_info_.plfreq = 22000; // XXX inexact! } else if(samplesPerSec == 44100) { strcpy(codec_info_.plname, "L16"); _codecId = kCodecL16_16kHz; - codec_info_.pacsize = 440; - codec_info_.plfreq = 44000; + codec_info_.pacsize = 441; + codec_info_.plfreq = 44100; } else if(samplesPerSec == 48000) { @@ -1125,8 +1125,6 @@ int32_t ModuleFileUtility::WriteWavHeader( { // Frame size in bytes for 10 ms of audio. - // TODO (hellner): 44.1 kHz has 440 samples frame size. Doesn't seem to - // be taken into consideration here! int32_t frameSize = (freq / 100) * bytesPerSample * channels; // Calculate the number of full frames that the wave file contain.
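The 440 -> 441 changes above follow from the frame arithmetic: a 10 ms read at fs Hz is fs/100 samples, which is exact at 44100 Hz (441) but fractional at 22050 and 11025 Hz, which is what the "XXX inexact!" markers flag. A self-contained check (illustrative only):

    #include <cstdio>

    int main() {
      const int rates[] = { 44100, 22050, 11025 };
      for (int fs : rates) {
        // Prints 441.00, then 220.50 (inexact), then 110.25 (inexact).
        std::printf("%5d Hz -> %.2f samples per 10 ms%s\n", fs, fs / 100.0,
                    fs % 100 ? " (inexact)" : "");
      }
      return 0;
    }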
diff --git a/media/webrtc/trunk/webrtc/modules/modules.gyp b/media/webrtc/trunk/webrtc/modules/modules.gyp index d5d3cbe8285..5dab6f7cd27 100644 --- a/media/webrtc/trunk/webrtc/modules/modules.gyp +++ b/media/webrtc/trunk/webrtc/modules/modules.gyp @@ -145,6 +145,7 @@ 'audio_coding/neteq4/mock/mock_payload_splitter.h', 'audio_processing/aec/system_delay_unittest.cc', 'audio_processing/aec/echo_cancellation_unittest.cc', + 'audio_processing/echo_cancellation_impl_unittest.cc', 'audio_processing/test/audio_processing_unittest.cc', 'audio_processing/utility/delay_estimator_unittest.cc', 'audio_processing/utility/ring_buffer_unittest.cc', diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc index a892386a674..d7b7877b986 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc @@ -11,6 +11,7 @@ #include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h" #include +#include // for abs() #include #include diff --git a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc index bc0be8b3454..18297ef5efc 100644 --- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc +++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc @@ -21,7 +21,7 @@ #include <winsock2.h> // timeval #include <mmsystem.h> // timeGetTime -#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC)) +#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_BSD) || (defined WEBRTC_MAC)) #include <sys/time.h> // gettimeofday #include <time.h> #endif @@ -96,9 +96,9 @@ uint32_t GetCurrentRTP(Clock* clock, uint32_t freq) { } uint32_t ConvertNTPTimeToRTP(uint32_t NTPsec, uint32_t NTPfrac, uint32_t freq) { - float ftemp = (float)NTPfrac / (float)NTP_FRAC; + float ftemp = (float)NTPfrac / (float)NTP_FRAC; uint32_t tmp = (uint32_t)(ftemp * freq); - return NTPsec * freq + tmp; + return NTPsec * freq + tmp; } uint32_t ConvertNTPTimeToMS(uint32_t NTPsec, uint32_t NTPfrac) { @@ -118,7 +118,7 @@ bool StringCompare(const char* str1, const char* str2, const uint32_t length) { return (_strnicmp(str1, str2, length) == 0) ? true : false; } -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) bool StringCompare(const char* str1, const char* str2, const uint32_t length) { return (strncasecmp(str1, str2, length) == 0) ? true : false; @@ -158,7 +158,7 @@ void AssignUWord24ToBuffer(uint8_t* dataBuffer, uint32_t value) { } void AssignUWord16ToBuffer(uint8_t* dataBuffer, uint16_t value) { -#if defined(WEBRTC_LITTLE_ENDIAN) +#if defined(WEBRTC_LITTLE_ENDIAN) dataBuffer[0] = static_cast<uint8_t>(value >> 8); dataBuffer[1] = static_cast<uint8_t>(value); #else diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/file_player_impl.cc b/media/webrtc/trunk/webrtc/modules/utility/source/file_player_impl.cc index 9240e64691e..2526035e276 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/source/file_player_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/utility/source/file_player_impl.cc @@ -87,7 +87,7 @@ int32_t FilePlayerImpl::Frequency() const { return 32000; } - else if(_codec.plfreq == 44000) + else if(_codec.plfreq == 44100 || _codec.plfreq == 44000 ) // XXX just 44100?
{ return 32000; } diff --git a/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc b/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc index 1f8715dfa50..0225836f8ae 100644 --- a/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc @@ -19,7 +19,7 @@ #if defined(_WIN32) #include #include -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) || defined(WEBRTC_BSD) #include #include #include @@ -237,7 +237,7 @@ inline uint32_t RtpDumpImpl::GetTimeInMS() const { #if defined(_WIN32) return timeGetTime(); -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) struct timeval tv; struct timezone tz; unsigned long val; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc index 3c153af1942..a6ec7ad14c4 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc @@ -16,6 +16,8 @@ #include "webrtc/system_wrappers/interface/ref_count.h" #include "webrtc/system_wrappers/interface/trace.h" +#include "AndroidJNIWrapper.h" + namespace webrtc { @@ -52,16 +54,13 @@ DeviceInfoAndroid::~DeviceInfoAndroid() { } uint32_t DeviceInfoAndroid::NumberOfDevices() { - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached) != 0) - return 0; + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) + return 0; + + jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); + jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, "%s GetMethodId", __FUNCTION__); @@ -76,7 +75,6 @@ uint32_t DeviceInfoAndroid::NumberOfDevices() { "%s Calling Number of devices", __FUNCTION__); numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid); } - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); if (numberOfDevices > 0) return numberOfDevices; @@ -92,17 +90,14 @@ int32_t DeviceInfoAndroid::GetDeviceName( char* /*productUniqueIdUTF8*/, uint32_t /*productUniqueIdUTF8Length*/) { - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; int32_t result = 0; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached)!= 0) - return -1; + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) + return -1; + + jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); + jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); // get the method ID for the Android Java GetDeviceUniqueName name. 
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName", @@ -151,8 +146,6 @@ int32_t DeviceInfoAndroid::GetDeviceName( result = -1; } - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, "%s: result %d", __FUNCTION__, (int) result); return result; @@ -168,21 +161,17 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap( delete it->second; _captureCapabilities.clear(); - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached) != 0) - return -1; + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) + return -1; + + jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); + jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); // Find the capability class - jclass javaCapClass = g_capabilityClass; + jclass javaCapClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureCapabilityClass); if (javaCapClass == NULL) { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "%s: SetAndroidCaptureClasses must be called first!", __FUNCTION__); @@ -195,7 +184,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap( "GetCapabilityArray", "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;"); if (cid == NULL) { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "%s: Can't find method GetCapabilityArray.", __FUNCTION__); return -1; @@ -204,7 +192,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap( jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); if (capureIdString == NULL) { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "%s: Can't create string for method GetCapabilityArray.", __FUNCTION__); @@ -214,7 +201,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap( jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject, cid, capureIdString); if (!javaCapabilitiesObj) { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "%s: Failed to call java GetCapabilityArray.", __FUNCTION__); @@ -225,7 +211,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap( jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I"); jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I"); if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "%s: Failed to get field Id.", __FUNCTION__); return -1; @@ -258,7 +243,8 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap( deviceUniqueIdUTF8, _lastUsedDeviceNameLength + 1); - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + env->DeleteGlobalRef(javaCapClass); + WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "CreateCapabilityMap %d", _captureCapabilities.size()); @@ -268,22 +254,18 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap( int32_t DeviceInfoAndroid::GetOrientation( const char* deviceUniqueIdUTF8, VideoCaptureRotation& orientation) { - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - bool 
attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached) != 0) - return -1; + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) + return -1; + + jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); + jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); // get the method ID for the Android Java GetOrientation . jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation", "(Ljava/lang/String;)I"); if (cid == NULL) { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "%s: Can't find method GetOrientation.", __FUNCTION__); return -1; @@ -291,7 +273,6 @@ int32_t DeviceInfoAndroid::GetOrientation( // Create a jstring so we can pass the deviceUniquName to the java method. jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); if (capureIdString == NULL) { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "%s: Can't create string for method GetCapabilityArray.", __FUNCTION__); @@ -300,7 +281,6 @@ int32_t DeviceInfoAndroid::GetOrientation( // Call the java class and get the orientation. jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid, capureIdString); - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); int32_t retValue = 0; switch (jorientation) { diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h index 6d985c380ec..0fd938e056f 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h @@ -16,6 +16,9 @@ #include "webrtc/modules/video_capture/device_info_impl.h" #include "webrtc/modules/video_capture/video_capture_impl.h" +#define AndroidJavaCaptureDeviceInfoClass "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid" +#define AndroidJavaCaptureCapabilityClass "org/webrtc/videoengine/CaptureCapabilityAndroid" + namespace webrtc { namespace videocapturemodule diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java index 5cdf37f9ead..df0369dc3fc 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java @@ -25,14 +25,25 @@ import android.graphics.YuvImage; import android.hardware.Camera; import android.hardware.Camera.PreviewCallback; import android.util.Log; +import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceHolder.Callback; +import android.view.SurfaceView; +import android.view.TextureView; +import android.view.TextureView.SurfaceTextureListener; +import android.view.View; + +import org.mozilla.gecko.GeckoApp; +import org.mozilla.gecko.GeckoAppShell; +import org.mozilla.gecko.GeckoAppShell.AppStateListener; +import org.mozilla.gecko.util.ThreadUtils; public class VideoCaptureAndroid implements PreviewCallback, Callback { private final static String TAG = "WEBRTC-JC"; private Camera camera; + private int cameraId; private 
AndroidVideoCaptureDevice currentDevice = null; public ReentrantLock previewBufferLock = new ReentrantLock(); // This lock takes sync with StartCapture and SurfaceChanged @@ -43,6 +54,9 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback { private boolean isCaptureStarted = false; private boolean isCaptureRunning = false; private boolean isSurfaceReady = false; + private SurfaceHolder surfaceHolder = null; + private SurfaceTexture surfaceTexture = null; + private SurfaceTexture dummySurfaceTexture = null; private final int numCaptureBuffers = 3; private int expectedFrameSize = 0; @@ -51,7 +65,6 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback { // C++ callback context variable. private long context = 0; private SurfaceHolder localPreview = null; - private SurfaceTexture dummySurfaceTexture = null; // True if this class owns the preview video buffers. private boolean ownsBuffers = false; @@ -59,25 +72,171 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback { private int mCaptureHeight = -1; private int mCaptureFPS = -1; + private int mCaptureRotation = 0; + + private AppStateListener mAppStateListener = null; + + public class MySurfaceTextureListener implements TextureView.SurfaceTextureListener { + public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { + Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureAvailable"); + + captureLock.lock(); + isSurfaceReady = true; + surfaceTexture = surface; + + tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS); + captureLock.unlock(); + } + + public void onSurfaceTextureSizeChanged(SurfaceTexture surface, + int width, int height) { + // Ignored, Camera does all the work for us + // Note that for a TextureView we start on onSurfaceTextureAvailable, + // for a SurfaceView we start on surfaceChanged. TextureView + // will not give out an onSurfaceTextureSizeChanged during creation. + } + + public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { + Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureDestroyed"); + isSurfaceReady = false; + DetachCamera(); + return true; + } + + public void onSurfaceTextureUpdated(SurfaceTexture surface) { + // Invoked every time there's a new Camera preview frame + } + } public static void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) { Log.d(TAG, "DeleteVideoCaptureAndroid"); - if (captureAndroid.camera == null) { - return; - } + + GeckoAppShell.getGeckoInterface().removeAppStateListener(captureAndroid.mAppStateListener); captureAndroid.StopCapture(); - captureAndroid.camera.release(); - captureAndroid.camera = null; + if (captureAndroid.camera != null) { + captureAndroid.camera.release(); + captureAndroid.camera = null; + } captureAndroid.context = 0; + + View cameraView = GeckoAppShell.getGeckoInterface().getCameraView(); + if (cameraView instanceof SurfaceView) { + ((SurfaceView)cameraView).getHolder().removeCallback(captureAndroid); + } else if (cameraView instanceof TextureView) { + // No need to explicitly remove the Listener: + // i.e. 
((TextureView)cameraView).setSurfaceTextureListener(null); + } + ThreadUtils.getUiHandler().post(new Runnable() { + @Override + public void run() { + try { + GeckoAppShell.getGeckoInterface().disableCameraView(); + } catch (Exception e) { + Log.e(TAG, + "VideoCaptureAndroid disableCameraView exception: " + + e.getLocalizedMessage()); + } + } + }); } public VideoCaptureAndroid(int in_id, long in_context, Camera in_camera, - AndroidVideoCaptureDevice in_device) { + AndroidVideoCaptureDevice in_device, + int in_cameraId) { id = in_id; context = in_context; camera = in_camera; + cameraId = in_cameraId; currentDevice = in_device; + mCaptureRotation = GetRotateAmount(); + + try { + View cameraView = GeckoAppShell.getGeckoInterface().getCameraView(); + if (cameraView instanceof SurfaceView) { + ((SurfaceView)cameraView).getHolder().addCallback(this); + } else if (cameraView instanceof TextureView) { + MySurfaceTextureListener listener = new MySurfaceTextureListener(); + ((TextureView)cameraView).setSurfaceTextureListener(listener); + } + ThreadUtils.getUiHandler().post(new Runnable() { + @Override + public void run() { + try { + GeckoAppShell.getGeckoInterface().enableCameraView(); + } catch (Exception e) { + Log.e(TAG, + "VideoCaptureAndroid enableCameraView exception: " + + e.getLocalizedMessage()); + } + } + }); + } catch (Exception ex) { + Log.e(TAG, "VideoCaptureAndroid constructor exception: " + + ex.getLocalizedMessage()); + } + + mAppStateListener = new AppStateListener() { + @Override + public void onPause() { + StopCapture(); + if (camera != null) { + camera.release(); + camera = null; + } + } + @Override + public void onResume() { + try { + if(android.os.Build.VERSION.SDK_INT>8) { + camera = Camera.open(cameraId); + } else { + camera = Camera.open(); + } + } catch (Exception ex) { + Log.e(TAG, "Error reopening the camera: " + ex.getMessage()); + } + captureLock.lock(); + isCaptureStarted = true; + tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS); + captureLock.unlock(); + } + @Override + public void onOrientationChanged() { + mCaptureRotation = GetRotateAmount(); + } + }; + + GeckoAppShell.getGeckoInterface().addAppStateListener(mAppStateListener); + } + + public int GetRotateAmount() { + int rotation = GeckoAppShell.getGeckoInterface().getActivity().getWindowManager().getDefaultDisplay().getRotation(); + int degrees = 0; + switch (rotation) { + case Surface.ROTATION_0: degrees = 0; break; + case Surface.ROTATION_90: degrees = 90; break; + case Surface.ROTATION_180: degrees = 180; break; + case Surface.ROTATION_270: degrees = 270; break; + } + if(android.os.Build.VERSION.SDK_INT>8) { + android.hardware.Camera.CameraInfo info = + new android.hardware.Camera.CameraInfo(); + android.hardware.Camera.getCameraInfo(cameraId, info); + int result; + if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { + result = (info.orientation + degrees) % 360; + } else { // back-facing + result = (info.orientation - degrees + 360) % 360; + } + return result; + } else { + // Assume 90deg orientation for Froyo devices. + // Only back-facing cameras are supported in Froyo.
+ int orientation = 90; + int result = (orientation - degrees + 360) % 360; + return result; + } } private int tryStartCapture(int width, int height, int frameRate) { @@ -86,79 +245,77 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback { return -1; } - Log.d(TAG, "tryStartCapture: " + width + - "x" + height +", frameRate: " + frameRate + - ", isCaptureRunning: " + isCaptureRunning + - ", isSurfaceReady: " + isSurfaceReady + - ", isCaptureStarted: " + isCaptureStarted); + Log.d(TAG, "tryStartCapture " + width + + " height " + height +" frame rate " + frameRate + + " isCaptureRunning " + isCaptureRunning + + " isSurfaceReady " + isSurfaceReady + + " isCaptureStarted " + isCaptureStarted); - if (isCaptureRunning || !isCaptureStarted) { + if (isCaptureRunning || !isSurfaceReady || !isCaptureStarted) { return 0; } - CaptureCapabilityAndroid currentCapability = - new CaptureCapabilityAndroid(); - currentCapability.width = width; - currentCapability.height = height; - currentCapability.maxFPS = frameRate; - PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat); - - Camera.Parameters parameters = camera.getParameters(); - parameters.setPreviewSize(currentCapability.width, - currentCapability.height); - parameters.setPreviewFormat(PIXEL_FORMAT); - parameters.setPreviewFrameRate(currentCapability.maxFPS); try { + if (surfaceHolder != null) + camera.setPreviewDisplay(surfaceHolder); + if (surfaceTexture != null) + camera.setPreviewTexture(surfaceTexture); + if (surfaceHolder == null && surfaceTexture == null) { + // No local renderer. Camera won't capture without + // setPreview{Texture,Display}, so we create a dummy SurfaceTexture + // and hand it over to Camera, but never listen for frame-ready + // callbacks, and never call updateTexImage on it. 
+ try { + dummySurfaceTexture = new SurfaceTexture(42); + camera.setPreviewTexture(dummySurfaceTexture); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + CaptureCapabilityAndroid currentCapability = + new CaptureCapabilityAndroid(); + currentCapability.width = width; + currentCapability.height = height; + currentCapability.maxFPS = frameRate; + PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat); + + + Camera.Parameters parameters = camera.getParameters(); + parameters.setPreviewSize(currentCapability.width, + currentCapability.height); + parameters.setPreviewFormat(PIXEL_FORMAT); + parameters.setPreviewFrameRate(currentCapability.maxFPS); camera.setParameters(parameters); - } catch (RuntimeException e) { - Log.e(TAG, "setParameters failed", e); + + int bufSize = width * height * pixelFormat.bitsPerPixel / 8; + byte[] buffer = null; + for (int i = 0; i < numCaptureBuffers; i++) { + buffer = new byte[bufSize]; + camera.addCallbackBuffer(buffer); + } + camera.setPreviewCallbackWithBuffer(this); + ownsBuffers = true; + + camera.startPreview(); + previewBufferLock.lock(); + expectedFrameSize = bufSize; + isCaptureRunning = true; + previewBufferLock.unlock(); + + } + catch (Exception ex) { + Log.e(TAG, "Failed to start camera: " + ex.getMessage()); return -1; } - int bufSize = width * height * pixelFormat.bitsPerPixel / 8; - byte[] buffer = null; - for (int i = 0; i < numCaptureBuffers; i++) { - buffer = new byte[bufSize]; - camera.addCallbackBuffer(buffer); - } - camera.setPreviewCallbackWithBuffer(this); - ownsBuffers = true; - - camera.startPreview(); - previewBufferLock.lock(); - expectedFrameSize = bufSize; isCaptureRunning = true; - previewBufferLock.unlock(); - return 0; } public int StartCapture(int width, int height, int frameRate) { Log.d(TAG, "StartCapture width " + width + " height " + height +" frame rate " + frameRate); - // Get the local preview SurfaceHolder from the static render class - localPreview = ViERenderer.GetLocalRenderer(); - if (localPreview != null) { - if (localPreview.getSurface() != null && - localPreview.getSurface().isValid()) { - surfaceCreated(localPreview); - } - localPreview.addCallback(this); - } else { - // No local renderer. Camera won't capture without - // setPreview{Texture,Display}, so we create a dummy SurfaceTexture - // and hand it over to Camera, but never listen for frame-ready - // callbacks, and never call updateTexImage on it. 
- captureLock.lock(); - try { - dummySurfaceTexture = new SurfaceTexture(42); - camera.setPreviewTexture(dummySurfaceTexture); - } catch (IOException e) { - throw new RuntimeException(e); - } - captureLock.unlock(); - } - captureLock.lock(); isCaptureStarted = true; mCaptureWidth = width; @@ -171,36 +328,48 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback { return res; } - public int StopCapture() { - Log.d(TAG, "StopCapture"); + public int DetachCamera() { try { previewBufferLock.lock(); isCaptureRunning = false; previewBufferLock.unlock(); - camera.stopPreview(); - camera.setPreviewCallbackWithBuffer(null); - } catch (RuntimeException e) { - Log.e(TAG, "Failed to stop camera", e); + if (camera != null) { + camera.setPreviewCallbackWithBuffer(null); + camera.stopPreview(); + } + } catch (Exception ex) { + Log.e(TAG, "Failed to stop camera: " + ex.getMessage()); return -1; } - - isCaptureStarted = false; return 0; } - native void ProvideCameraFrame(byte[] data, int length, long captureObject); + public int StopCapture() { + Log.d(TAG, "StopCapture"); + isCaptureStarted = false; + return DetachCamera(); + } + + native void ProvideCameraFrame(byte[] data, int length, int rotation, + long captureObject); public void onPreviewFrame(byte[] data, Camera camera) { previewBufferLock.lock(); + String dataLengthStr = "does not exist"; + if(data != null) { + dataLengthStr = Integer.toString(data.length); + } + + // The following line is for debug only - // Log.v(TAG, "preview frame length " + data.length + - // " context" + context); + Log.v(TAG, "preview frame length " + dataLengthStr + + " context" + context); if (isCaptureRunning) { // If StartCapture has been called but not StopCapture // Call the C++ layer with the captured frame - if (data.length == expectedFrameSize) { - ProvideCameraFrame(data, expectedFrameSize, context); + if (data != null && data.length == expectedFrameSize) { + ProvideCameraFrame(data, expectedFrameSize, mCaptureRotation, + context); if (ownsBuffers) { // Give the video buffer to the camera service again. camera.addCallbackBuffer(data); @@ -210,58 +379,26 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback { previewBufferLock.unlock(); } - // Sets the rotation of the preview render window. - // Does not affect the captured video image. - public void SetPreviewRotation(int rotation) { - Log.v(TAG, "SetPreviewRotation:" + rotation); - - if (camera == null) { - return; - } - - int resultRotation = 0; - if (currentDevice.frontCameraType == - VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) { - // this is a 2.3 or later front facing camera. - // SetDisplayOrientation will flip the image horizontally - // before doing the rotation.
- resultRotation = ( 360 - rotation ) % 360; // compensate the mirror - } - else { - // Back facing or 2.2 or previous front camera - resultRotation = rotation; - } - camera.setDisplayOrientation(resultRotation); - } - public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { Log.d(TAG, "VideoCaptureAndroid::surfaceChanged"); + + captureLock.lock(); + isSurfaceReady = true; + surfaceHolder = holder; + + tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS); + captureLock.unlock(); + return; } public void surfaceCreated(SurfaceHolder holder) { Log.d(TAG, "VideoCaptureAndroid::surfaceCreated"); - captureLock.lock(); - try { - if (camera != null) { - camera.setPreviewDisplay(holder); - } - } catch (IOException e) { - Log.e(TAG, "Failed to set preview surface!", e); - } - captureLock.unlock(); } public void surfaceDestroyed(SurfaceHolder holder) { Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed"); - captureLock.lock(); - try { - if (camera != null) { - camera.setPreviewDisplay(null); - } - } catch (IOException e) { - Log.e(TAG, "Failed to clear preview surface!", e); - } - captureLock.unlock(); + isSurfaceReady = false; + DetachCamera(); } } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java index d1a744f46b4..74d21d935bf 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java @@ -252,6 +252,7 @@ public class VideoCaptureDeviceInfoAndroid { Log.d(TAG, "AllocateCamera " + deviceUniqueId); Camera camera = null; + int cameraId = 0; AndroidVideoCaptureDevice deviceToUse = null; for (AndroidVideoCaptureDevice device: deviceList) { if(device.deviceUniqueName.equals(deviceUniqueId)) { @@ -266,10 +267,12 @@ public class VideoCaptureDeviceInfoAndroid { break; default: // From Android 2.3 and onwards) - if(android.os.Build.VERSION.SDK_INT>8) - camera=Camera.open(device.index); - else - camera=Camera.open(); // Default camera + if(android.os.Build.VERSION.SDK_INT>8) { + cameraId = device.index; + camera = Camera.open(device.index); + } else { + camera = Camera.open(); // Default camera + } } } } @@ -279,7 +282,7 @@ public class VideoCaptureDeviceInfoAndroid { } Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid"); - return new VideoCaptureAndroid(id, context, camera, deviceToUse); + return new VideoCaptureAndroid(id, context, camera, deviceToUse, cameraId); } catch (NoSuchMethodException e) { Log.e(TAG, "AllocateCamera Failed to open camera", e); } catch (ClassNotFoundException e) { diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc index a66ce5c3265..b7c5414a0d5 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc @@ -16,6 +16,9 @@ #include "webrtc/system_wrappers/interface/ref_count.h" #include "webrtc/system_wrappers/interface/trace.h" +#include "AndroidJNIWrapper.h" +#include "mozilla/Assertions.h" + namespace webrtc { #if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) @@ -45,12 +48,19
@@ VideoCaptureModule* VideoCaptureImpl::Create( return implementation; } +#ifdef DEBUG // Android logging, uncomment to print trace to // logcat instead of trace file/callback -// #include <android/log.h> +#include <android/log.h> // #undef WEBRTC_TRACE // #define WEBRTC_TRACE(a,b,c,...) // __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) +// Some functions are called before the WebRTC logging can be brought up; +// log those to the Android log. +#define EARLY_WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC-VCA", __VA_ARGS__) +#else +#define EARLY_WEBRTC_TRACE(a,b,c,...) +#endif JavaVM* VideoCaptureAndroid::g_jvm = NULL; //VideoCaptureAndroid.java @@ -59,7 +69,6 @@ jclass VideoCaptureAndroid::g_javaCmClass = NULL; jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL; //static instance of VideoCaptureDeviceInfoAndroid.java jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL; -jobject VideoCaptureAndroid::g_javaContext = NULL; /* * Register references to Java Capture class. @@ -67,87 +76,59 @@ jobject VideoCaptureAndroid::g_javaContext = NULL; int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, void* javaContext) { + MOZ_ASSERT(javaVM != nullptr || g_javaCmDevInfoClass != nullptr); + EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: running", __FUNCTION__); + g_jvm = static_cast<JavaVM*> (javaVM); - g_javaContext = static_cast<jobject> (javaContext); if (javaVM) { + // Already done? Exit early. + if (g_javaCmClass != NULL + && g_javaCmDevInfoClass != NULL + && g_javaCmDevInfoObject != NULL) { + EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: early exit", __FUNCTION__); + return 0; + } + JNIEnv* env = NULL; if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: could not get Java environment", __FUNCTION__); return -1; } // get java capture class type (note path to class packet) - jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass); - if (!javaCmClassLocal) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + g_javaCmClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureClass); + if (!g_javaCmClass) { + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: could not find java class", __FUNCTION__); return -1; } - // create a global reference to the class - // (to tell JNI that we are referencing it - // after this function has returned) - g_javaCmClass = static_cast<jclass> - (env->NewGlobalRef(javaCmClassLocal)); - if (!g_javaCmClass) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: InitVideoEngineJava(): could not create" - " Java Camera class reference", - __FUNCTION__); - return -1; - } - // Delete local class ref, we only use the global ref - env->DeleteLocalRef(javaCmClassLocal); JNINativeMethod nativeFunctions = - { "ProvideCameraFrame", "([BIJ)V", + { "ProvideCameraFrame", "([BIIJ)V", (void*) &VideoCaptureAndroid::ProvideCameraFrame }; if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s: Registered native functions", __FUNCTION__); } else { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: Failed to register native
functions", __FUNCTION__); return -1; } - jclass capabilityClassLocal = env->FindClass( - "org/webrtc/videoengine/CaptureCapabilityAndroid"); - if (!capabilityClassLocal) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not find java class", __FUNCTION__); - return -1; - } - jclass capabilityClassGlobal = reinterpret_cast(env->NewGlobalRef( - capabilityClassLocal)); - DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal); - // get java capture class type (note path to class packet) - jclass javaCmDevInfoClassLocal = env->FindClass( - "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"); - if (!javaCmDevInfoClassLocal) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + g_javaCmDevInfoClass = jsjni_GetGlobalClassRef( + AndroidJavaCaptureDeviceInfoClass); + if (!g_javaCmDevInfoClass) { + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: could not find java class", __FUNCTION__); return -1; } - // create a global reference to the class - // (to tell JNI that we are referencing it - // after this function has returned) - g_javaCmDevInfoClass = static_cast - (env->NewGlobalRef(javaCmDevInfoClassLocal)); - if (!g_javaCmDevInfoClass) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: InitVideoEngineJava(): could not create Java " - "Camera Device info class reference", - __FUNCTION__); - return -1; - } - // Delete local class ref, we only use the global ref - env->DeleteLocalRef(javaCmDevInfoClassLocal); - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "VideoCaptureDeviceInfoAndroid get method id"); // get the method ID for the Android Java CaptureClass static @@ -158,23 +139,23 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, "(ILandroid/content/Context;)" "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;"); if (cid == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: could not get java" "VideoCaptureDeviceInfoAndroid constructor ID", __FUNCTION__); return -1; } - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s: construct static java device object", __FUNCTION__); // construct the object by calling the static constructor object jobject javaCameraDeviceInfoObjLocal = env->CallStaticObjectMethod(g_javaCmDevInfoClass, cid, (int) -1, - g_javaContext); + javaContext); if (!javaCameraDeviceInfoObjLocal) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, "%s: could not create Java Capture Device info object", __FUNCTION__); return -1; @@ -183,7 +164,7 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, // we are referencing it after this function has returned) g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal); if (!g_javaCmDevInfoObject) { - WEBRTC_TRACE(webrtc::kTraceError, + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1, "%s: could not create Java" @@ -193,13 +174,16 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, } // Delete local object ref, we only use the global ref env->DeleteLocalRef(javaCameraDeviceInfoObjLocal); + + EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + 
"%s: success", __FUNCTION__); return 0; } else { - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, "%s: JVM is NULL, assuming deinit", __FUNCTION__); if (!g_jvm) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: SetAndroidObjects not called with a valid JVM.", __FUNCTION__); return -1; @@ -211,7 +195,7 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, // Attach this thread to JVM jint res = g_jvm->AttachCurrentThread(&env, NULL); if ((res < 0) || !env) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, + EARLY_WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env); return -1; @@ -222,7 +206,7 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, env->DeleteGlobalRef(g_javaCmDevInfoClass); env->DeleteGlobalRef(g_javaCmClass); if (attached && g_jvm->DetachCurrentThread() < 0) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, + EARLY_WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, "%s: Could not detach thread from JVM", __FUNCTION__); return -1; } @@ -232,47 +216,6 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM, return 0; } -int32_t VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - JNIEnv*& env, - jclass& javaCmDevInfoClass, - jobject& javaCmDevInfoObject, - bool& attached) { - // get the JNI env for this thread - if (!g_jvm) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: SetAndroidObjects not called with a valid JVM.", - __FUNCTION__); - return -1; - } - attached = false; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - attached = true; - } - javaCmDevInfoClass = g_javaCmDevInfoClass; - javaCmDevInfoObject = g_javaCmDevInfoObject; - return 0; - -} - -int32_t VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects( - bool attached) { - if (attached && g_jvm->DetachCurrentThread() < 0) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, - "%s: Could not detach thread from JVM", __FUNCTION__); - return -1; - } - return 0; -} - /* * JNI callback from Java class. 
Called * when the camera has a new frame to deliver @@ -284,11 +227,29 @@ void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env, jobject, jbyteArray javaCameraFrame, jint length, + jint rotation, jlong context) { VideoCaptureAndroid* captureModule = reinterpret_cast(context); WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, -1, "%s: IncomingFrame %d", __FUNCTION__,length); + + switch (rotation) { + case 90: + captureModule->SetCaptureRotation(kCameraRotate90); + break; + case 180: + captureModule->SetCaptureRotation(kCameraRotate180); + break; + case 270: + captureModule->SetCaptureRotation(kCameraRotate270); + break; + case 0: + default: + captureModule->SetCaptureRotation(kCameraRotate0); + break; + } + jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL); captureModule->IncomingFrame((uint8_t*) cameraFrame, length,captureModule->_frameInfo,0); @@ -311,7 +272,7 @@ VideoCaptureAndroid::VideoCaptureAndroid(const int32_t id) // VideoCaptureAndroid.java // ---------------------------------------------------------------------------- int32_t VideoCaptureAndroid::Init(const int32_t id, - const char* deviceUniqueIdUTF8) { + const char* deviceUniqueIdUTF8) { const int nameLength = strlen(deviceUniqueIdUTF8); if (nameLength >= kVideoCaptureUniqueNameLength) { return -1; @@ -338,33 +299,25 @@ int32_t VideoCaptureAndroid::Init(const int32_t id, "%s: Not a valid Java VM pointer", __FUNCTION__); return -1; } - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - // get the JNI env for this thread - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) return -1; - } - isAttached = true; - } + + jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass(); + jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject(); + + int32_t rotation = 0; WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, "get method id"); - // get the method ID for the Android Java // CaptureDeviceInfoClass AllocateCamera factory method. 
char signature[256]; sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass); - jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera", + jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "AllocateCamera", signature); if (cid == NULL) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, @@ -374,7 +327,7 @@ int32_t VideoCaptureAndroid::Init(const int32_t id, jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); // construct the object by calling the static constructor object - jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject, + jobject javaCameraObjLocal = env->CallObjectMethod(javaCmDevInfoObject, cid, (jint) id, (jlong) this, capureIdString); @@ -394,17 +347,6 @@ int32_t VideoCaptureAndroid::Init(const int32_t id, return -1; } - // Delete local object ref, we only use the global ref - env->DeleteLocalRef(javaCameraObjLocal); - - // Detach this thread if it was attached - if (isAttached) { - if (g_jvm->DetachCurrentThread() < 0) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - return 0; } @@ -416,31 +358,17 @@ VideoCaptureAndroid::~VideoCaptureAndroid() { "%s: Nothing to clean", __FUNCTION__); } else { - bool isAttached = false; - // get the JNI env for this thread - JNIEnv *env; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - } - else { - isAttached = true; - } - } + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) + return; // get the method ID for the Android Java CaptureClass static // DeleteVideoCaptureAndroid method. Call this to release the camera so // another application can use it. - jmethodID cid = env->GetStaticMethodID( - g_javaCmClass, - "DeleteVideoCaptureAndroid", - "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V"); + jmethodID cid = env->GetStaticMethodID(g_javaCmClass, + "DeleteVideoCaptureAndroid", + "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V"); if (cid != NULL) { WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__); @@ -450,20 +378,10 @@ VideoCaptureAndroid::~VideoCaptureAndroid() { // Delete global object ref to the camera. 
env->DeleteGlobalRef(_javaCaptureObj); _javaCaptureObj = NULL; - } - else { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find DeleteVideoCaptureAndroid id", - __FUNCTION__); - } - - // Detach this thread if it was attached - if (isAttached) { - if (g_jvm->DetachCurrentThread() < 0) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, - _id, "%s: Could not detach thread from JVM", + } else { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Failed to find DeleteVideoCaptureAndroid id", __FUNCTION__); - } } } } @@ -474,23 +392,13 @@ int32_t VideoCaptureAndroid::StartCapture( WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, "%s: ", __FUNCTION__); - bool isAttached = false; int32_t result = 0; - // get the JNI env for this thread - JNIEnv *env; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - } - else { - isAttached = true; - } - } + int32_t rotation = 0; + + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) + return -1; if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability, _frameInfo) < 0) { @@ -522,13 +430,6 @@ int32_t VideoCaptureAndroid::StartCapture( "%s: Failed to find StartCapture id", __FUNCTION__); } - // Detach this thread if it was attached - if (isAttached) { - if (g_jvm->DetachCurrentThread() < 0) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } if (result == 0) { _requestedCapability = capability; _captureStarted = true; @@ -543,23 +444,12 @@ int32_t VideoCaptureAndroid::StopCapture() { WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, "%s: ", __FUNCTION__); - bool isAttached = false; int32_t result = 0; - // get the JNI env for this thread - JNIEnv *env = NULL; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - } - else { - isAttached = true; - } - } + + AutoLocalJNIFrame jniFrame; + JNIEnv* env = jniFrame.GetEnv(); + if (!env) + return -1; memset(&_requestedCapability, 0, sizeof(_requestedCapability)); memset(&_frameInfo, 0, sizeof(_frameInfo)); @@ -577,13 +467,6 @@ int32_t VideoCaptureAndroid::StopCapture() { "%s: Failed to find StopCapture id", __FUNCTION__); } - // Detach this thread if it was attached - if (isAttached) { - if (g_jvm->DetachCurrentThread() < 0) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } _captureStarted = false; WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, @@ -610,65 +493,7 @@ int32_t VideoCaptureAndroid::CaptureSettings( int32_t VideoCaptureAndroid::SetCaptureRotation( VideoCaptureRotation rotation) { CriticalSectionScoped cs(&_apiCs); - if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) { - if (!g_jvm) - return -1; - - // get the JNI env for this thread - JNIEnv *env; 
- bool isAttached = false; - - // get the JNI env for this thread - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - isAttached = true; - } - - jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation", - "(I)V"); - if (cid == NULL) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not get java SetPreviewRotation ID", - __FUNCTION__); - return -1; - } - jint rotateFrame = 0; - switch (rotation) { - case kCameraRotate0: - rotateFrame = 0; - break; - case kCameraRotate90: - rotateFrame = 90; - break; - case kCameraRotate180: - rotateFrame = 180; - break; - case kCameraRotate270: - rotateFrame = 270; - break; - } - env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame); - - // Detach this thread if it was attached - if (isAttached) { - if (g_jvm->DetachCurrentThread() < 0) { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, - _id, "%s: Could not detach thread from JVM", - __FUNCTION__); - } - } - - } - return 0; + return VideoCaptureImpl::SetCaptureRotation(rotation); } } // namespace videocapturemodule diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h index 31cf76d64d6..09997c076a3 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h @@ -12,6 +12,8 @@ #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_ #include <jni.h> +#include <assert.h> +#include "trace.h" #include "webrtc/modules/video_capture/android/device_info_android.h" #include "webrtc/modules/video_capture/video_capture_impl.h" @@ -24,17 +26,9 @@ namespace videocapturemodule { class VideoCaptureAndroid : public VideoCaptureImpl { public: static int32_t SetAndroidObjects(void* javaVM, void* javaContext); - static int32_t AttachAndUseAndroidDeviceInfoObjects( - JNIEnv*& env, - jclass& javaCmDevInfoClass, - jobject& javaCmDevInfoObject, - bool& attached); - static int32_t ReleaseAndroidDeviceInfoObjects(bool attached); - VideoCaptureAndroid(const int32_t id); virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8); - virtual int32_t StartCapture( const VideoCaptureCapability& capability); virtual int32_t StopCapture(); @@ -42,12 +36,16 @@ class VideoCaptureAndroid : public VideoCaptureImpl { virtual int32_t CaptureSettings(VideoCaptureCapability& settings); virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation); + friend class AutoLocalJNIFrame; + protected: virtual ~VideoCaptureAndroid(); static void JNICALL ProvideCameraFrame (JNIEnv * env, jobject, jbyteArray javaCameraFrame, - jint length, jlong context); + jint length, + jint rotation, + jlong context); DeviceInfoAndroid _capInfo; jobject _javaCaptureObj; // Java Camera object.
VideoCaptureCapability _frameInfo; @@ -58,7 +56,120 @@ class VideoCaptureAndroid : public VideoCaptureImpl { static jclass g_javaCmDevInfoClass; //Static java object implementing the needed device info functions; static jobject g_javaCmDevInfoObject; - static jobject g_javaContext; // Java Application context +}; + +// Reworked version of what is available in AndroidBridge, +// can attach/detach in addition to push/pop frames. +class AutoLocalJNIFrame { +public: + AutoLocalJNIFrame(int nEntries = 128) + : mEntries(nEntries), mHasFrameBeenPushed(false), mAttached(false) + { + mJNIEnv = InitJNIEnv(); + Push(); + } + + JNIEnv* GetEnv() { + return mJNIEnv; + } + + jclass GetCmDevInfoClass() { + assert(VideoCaptureAndroid::g_javaCmDevInfoClass != nullptr); + return VideoCaptureAndroid::g_javaCmDevInfoClass; + } + + jobject GetCmDevInfoObject() { + assert(VideoCaptureAndroid::g_javaCmDevInfoObject != nullptr); + return VideoCaptureAndroid::g_javaCmDevInfoObject; + } + + bool CheckForException() { + if (mJNIEnv->ExceptionCheck()) { + mJNIEnv->ExceptionDescribe(); + mJNIEnv->ExceptionClear(); + return true; + } + + return false; + } + + ~AutoLocalJNIFrame() { + if (!mJNIEnv) + return; + + CheckForException(); + + if (mHasFrameBeenPushed) + mJNIEnv->PopLocalFrame(NULL); + + if (mAttached) { + int res = VideoCaptureAndroid::g_jvm->DetachCurrentThread(); + if (res < 0) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: JVM Detach failed.", __FUNCTION__); + } + } + } + +private: + void Push() { + if (!mJNIEnv) + return; + + // Make sure there is enough space to store a local ref to the + // exception. I am not completely sure this is needed, but does + // not hurt. + jint ret = mJNIEnv->PushLocalFrame(mEntries + 1); + assert(ret == 0); + if (ret < 0) + CheckForException(); + else + mHasFrameBeenPushed = true; + } + + JNIEnv* InitJNIEnv() + { + JNIEnv* env = nullptr; + + // Get the JNI env for this thread. + if (!VideoCaptureAndroid::g_jvm) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: SetAndroidObjects not called with a valid JVM.", + __FUNCTION__); + return nullptr; + } + + jint res = VideoCaptureAndroid::g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4); + if (res == JNI_EDETACHED) { + // Try to attach this thread to the JVM and get the env. + res = VideoCaptureAndroid::g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + // Attaching failed, error out. + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return nullptr; + } + mAttached = true; + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: attach success", __FUNCTION__); + } else if (res == JNI_OK) { + // Already attached, GetEnv succeeded. + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: did not attach because JVM Env already present", + __FUNCTION__); + } else { + // Non-recoverable error in GetEnv.
+ return nullptr; + } + + return env; + } + + int mEntries; + JNIEnv* mJNIEnv; + bool mHasFrameBeenPushed; + bool mAttached; }; } // namespace videocapturemodule diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc b/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc index 1bdaa14f3c8..edf1d1ad533 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc @@ -55,7 +55,7 @@ int32_t DeviceInfoImpl::NumberOfCapabilities( if (_lastUsedDeviceNameLength == strlen((char*) deviceUniqueIdUTF8)) { // Is it the same device that is asked for again. -#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) +#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) if(strncasecmp((char*)_lastUsedDeviceName, (char*) deviceUniqueIdUTF8, _lastUsedDeviceNameLength)==0) @@ -92,7 +92,7 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8, ReadLockScoped cs(_apiLock); if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8)) -#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) +#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || (strncasecmp((char*)_lastUsedDeviceName, (char*) deviceUniqueIdUTF8, _lastUsedDeviceNameLength)!=0)) @@ -156,7 +156,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( ReadLockScoped cs(_apiLock); if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8)) -#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) +#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || (strncasecmp((char*)_lastUsedDeviceName, (char*) deviceUniqueIdUTF8, _lastUsedDeviceNameLength)!=0)) diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc index 0b16ca32d71..362b129b57d 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc @@ -93,9 +93,10 @@ int32_t DeviceInfoLinux::GetDeviceName( char device[20]; int fd = -1; bool found = false; - for (int n = 0; n < 64; n++) + int device_index; + for (device_index = 0; device_index < 64; device_index++) { - sprintf(device, "/dev/video%d", n); + sprintf(device, "/dev/video%d", device_index); if ((fd = open(device, O_RDONLY)) != -1) { if (count == deviceNumber) { @@ -154,6 +155,15 @@ int32_t DeviceInfoLinux::GetDeviceName( "buffer passed is too small"); return -1; } + } else { + // if there's no bus info to use for uniqueId, invent one - and it has to be repeatable + if (snprintf(deviceUniqueIdUTF8, deviceUniqueIdUTF8Length, "fake_%u", device_index) >= + deviceUniqueIdUTF8Length) + { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "buffer passed is too small"); + return -1; + } } return 0; @@ -165,6 +175,7 @@ int32_t DeviceInfoLinux::CreateCapabilityMap( int fd; char device[32]; bool found = false; + int device_index; const int32_t deviceUniqueIdUTF8Length = (int32_t) strlen((char*) deviceUniqueIdUTF8); @@ -177,40 +188,41 @@ int32_t DeviceInfoLinux::CreateCapabilityMap( "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8); /* detect /dev/video [0-63] entries */ - for (int n = 0; n < 64; ++n) + if (sscanf(deviceUniqueIdUTF8,"fake_%d",&device_index) == 1) { - sprintf(device, "/dev/video%d", n); + sprintf(device, "/dev/video%d", device_index); fd = open(device, O_RDONLY); - if (fd == -1) - continue; - 
- // query device capabilities - struct v4l2_capability cap; - if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) - { - if (cap.bus_info[0] != 0) - { - if (strncmp((const char*) cap.bus_info, - (const char*) deviceUniqueIdUTF8, - strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id - { - found = true; - break; // fd matches with device unique id supplied - } - } - else //match for device name - { - if (IsDeviceNameMatches((const char*) cap.card, - (const char*) deviceUniqueIdUTF8)) - { - found = true; - break; - } - } + if (fd != -1) { + found = true; } - close(fd); // close since this is not the matching device - } + } else { + /* detect /dev/video [0-63] entries */ + for (int n = 0; n < 64; ++n) + { + sprintf(device, "/dev/video%d", n); + fd = open(device, O_RDONLY); + if (fd == -1) + continue; + // query device capabilities + struct v4l2_capability cap; + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) + { + if (cap.bus_info[0] != 0) + { + if (strncmp((const char*) cap.bus_info, + (const char*) deviceUniqueIdUTF8, + strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id + { + found = true; + break; // fd matches with device unique id supplied + } + } + // else can't be a match as the test for fake_* above would have matched it + } + close(fd); // close since this is not the matching device + } + } if (!found) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found"); diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc b/media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc index 6cad637f2db..407cf8b5862 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc @@ -10,7 +10,6 @@ #include #include -#include #include #include #include @@ -18,7 +17,15 @@ #include #include -#include +//v4l includes +#if defined(__DragonFly__) || defined(__NetBSD__) || defined(__OpenBSD__) +#include <sys/videoio.h> +#elif defined(__sun) +#include <sys/videodev2.h> +#else +#include <linux/videodev2.h> +#endif + #include #include "webrtc/modules/video_capture/linux/video_capture_linux.h" @@ -71,6 +78,13 @@ int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); } + int device_index; + if (sscanf(deviceUniqueIdUTF8,"fake_%d", &device_index) == 1) + { + _deviceId = device_index; + return 0; + } + int fd; char device[32]; bool found = false; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm index 4f03e5e1139..3344bde9c1a 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm +++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm @@ -15,6 +15,20 @@ #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/trace.h" +class nsAutoreleasePool { +public: + nsAutoreleasePool() + { + mLocalPool = [[NSAutoreleasePool alloc] init]; + } + ~nsAutoreleasePool() + { + [mLocalPool release]; + } +private: + NSAutoreleasePool *mLocalPool; +}; + namespace webrtc { @@ -41,6 +55,7 @@ VideoCaptureMacQTKit::VideoCaptureMacQTKit(const int32_t id) : VideoCaptureMacQTKit::~VideoCaptureMacQTKit() { + nsAutoreleasePool localPool; WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, "~VideoCaptureMacQTKit() called");
if(_captureDevice) @@ -71,6 +86,8 @@ int32_t VideoCaptureMacQTKit::Init( _deviceUniqueId = new char[nameLength+1]; memcpy(_deviceUniqueId, iDeviceUniqueIdUTF8,nameLength+1); + nsAutoreleasePool localPool; + _captureDevice = [[VideoCaptureMacQTKitObjC alloc] init]; if(NULL == _captureDevice) { @@ -164,6 +181,7 @@ int32_t VideoCaptureMacQTKit::StartCapture( const VideoCaptureCapability& capability) { + nsAutoreleasePool localPool; _captureWidth = capability.width; _captureHeight = capability.height; _captureFrameRate = capability.maxFPS; @@ -180,6 +198,7 @@ int32_t VideoCaptureMacQTKit::StartCapture( int32_t VideoCaptureMacQTKit::StopCapture() { + nsAutoreleasePool localPool; [_captureDevice stopCapture]; _isCapturing = false; return 0; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm index 311f8ac7708..7636b27abed 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm +++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm @@ -13,6 +13,20 @@ #include "webrtc/modules/video_capture/video_capture_config.h" #include "webrtc/system_wrappers/interface/trace.h" +class nsAutoreleasePool { +public: + nsAutoreleasePool() + { + mLocalPool = [[NSAutoreleasePool alloc] init]; + } + ~nsAutoreleasePool() + { + [mLocalPool release]; + } +private: + NSAutoreleasePool *mLocalPool; +}; + namespace webrtc { namespace videocapturemodule @@ -21,13 +35,14 @@ namespace videocapturemodule VideoCaptureMacQTKitInfo::VideoCaptureMacQTKitInfo(const int32_t id) : DeviceInfoImpl(id) { + nsAutoreleasePool localPool; _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc] init]; } VideoCaptureMacQTKitInfo::~VideoCaptureMacQTKitInfo() { + nsAutoreleasePool localPool; [_captureInfo release]; - } int32_t VideoCaptureMacQTKitInfo::Init() @@ -39,6 +54,7 @@ int32_t VideoCaptureMacQTKitInfo::Init() uint32_t VideoCaptureMacQTKitInfo::NumberOfDevices() { + nsAutoreleasePool localPool; uint32_t captureDeviceCount = [[_captureInfo getCaptureDeviceCount]intValue]; return captureDeviceCount; @@ -51,6 +67,7 @@ int32_t VideoCaptureMacQTKitInfo::GetDeviceName( uint32_t deviceUniqueIdUTF8Length, char* productUniqueIdUTF8, uint32_t productUniqueIdUTF8Length) { + nsAutoreleasePool localPool; int errNum = [[_captureInfo getDeviceNamesFromIndex:deviceNumber DefaultName:deviceNameUTF8 WithLength:deviceNameLength AndUniqueID:deviceUniqueIdUTF8 @@ -104,6 +121,7 @@ int32_t VideoCaptureMacQTKitInfo::DisplayCaptureSettingsDialogBox( uint32_t positionX, uint32_t positionY) { + nsAutoreleasePool localPool; return [[_captureInfo displayCaptureSettingsDialogBoxWithDevice:deviceUniqueIdUTF8 AndTitle:dialogTitleUTF8 diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h index c03aa935f5f..0a9513465ce 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h +++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h @@ -25,7 +25,6 @@ @interface VideoCaptureMacQTKitInfoObjC : NSObject{ bool _OSSupportedInfo; NSArray* _captureDevicesInfo; - NSAutoreleasePool* _poolInfo; int _captureDeviceCountInfo; } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm 
b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm index 7b46aec1925..730b15769a2 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm +++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm @@ -93,11 +93,15 @@ using namespace webrtc; return [NSNumber numberWithInt:-1]; } - QTCaptureDevice* tempCaptureDevice = - (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index]; + if ([_captureDevicesInfo count] <= index) + { + return [NSNumber numberWithInt:-1]; + } + + QTCaptureDevice* tempCaptureDevice = (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index]; if(!tempCaptureDevice) { - return [NSNumber numberWithInt:-1]; + return [NSNumber numberWithInt:-1]; } memset(deviceName, 0, deviceNameLength); @@ -137,7 +141,6 @@ using namespace webrtc; return [NSNumber numberWithInt:0]; } - _poolInfo = [[NSAutoreleasePool alloc]init]; _captureDeviceCountInfo = 0; [self getCaptureDevices]; diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm index 4120335fa66..5bca315c392 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm +++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm @@ -151,7 +151,17 @@ using namespace videocapturemodule; if (!_capturing) return; - [_captureSession stopRunning]; + // This method is often called on a secondary thread. Which means + // that the following can sometimes run "too early", causing crashes + // and/or weird errors concerning initialization. On OS X 10.7 and + // 10.8, the CoreMediaIO method CMIOUninitializeGraph() is called from + // -[QTCaptureSession stopRunning]. If this is called too early, + // low-level session data gets uninitialized before low-level code + // is finished trying to use it. The solution is to make stopRunning + // always run on the main thread. See bug 837539. 
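Bouncing stopRunning to the main thread, as the comment above explains and the hunk lines below apply via performSelectorOnMainThread, is the usual "marshal the call to the thread that owns the state" pattern. A toy C++ analog with a mutex-guarded task queue; Cocoa's run loop plays the drain role in the patch itself:

#include <cstdio>
#include <functional>
#include <mutex>
#include <queue>
#include <utility>

// Toy main-thread task queue; not the patch's mechanism, only its shape.
class MainThreadQueue {
 public:
  void Post(std::function<void()> task) {  // callable from any thread
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push(std::move(task));
  }
  void Drain() {                           // called on the main thread
    for (;;) {
      std::function<void()> task;
      {
        std::lock_guard<std::mutex> lock(mutex_);
        if (tasks_.empty()) return;
        task = std::move(tasks_.front());
        tasks_.pop();
      }
      task();  // session teardown runs where its state lives
    }
  }
 private:
  std::mutex mutex_;
  std::queue<std::function<void()>> tasks_;
};

int main() {
  MainThreadQueue main_thread;
  // A worker thread would do this instead of calling stopRunning directly:
  main_thread.Post([] { puts("stopRunning on main thread"); });
  main_thread.Drain();  // the main thread's event loop
  return 0;
}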
+ [_captureSession performSelectorOnMainThread:@selector(stopRunning) + withObject:nil + waitUntilDone:NO]; _capturing = NO; } diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm b/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm index 661b0f7b4ed..4babbbaf5fa 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm +++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm @@ -27,8 +27,8 @@ #include #include #else -#include -#include +#include "qtkit/video_capture_qtkit.h" +#include "qtkit/video_capture_qtkit_info.h" #endif namespace webrtc diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi index f2cecd1a402..a8e1ce3e73c 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi @@ -16,6 +16,11 @@ '<(webrtc_root)/common_video/common_video.gyp:common_video', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], + + 'cflags_mozilla': [ + '$(NSPR_CFLAGS)', + ], + 'include_dirs': [ 'include', '../interface', @@ -41,7 +46,7 @@ ], }, { # include_internal_video_capture == 1 'conditions': [ - ['OS=="linux"', { + ['include_v4l2_video_capture==1', { 'include_dirs': [ 'linux', ], @@ -77,8 +82,12 @@ }, }], # mac ['OS=="win"', { - 'dependencies': [ - '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses', + 'conditions': [ + ['build_with_mozilla==0', { + 'dependencies': [ + '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses', + ], + }], ], 'include_dirs': [ 'windows', @@ -97,6 +106,10 @@ 'windows/video_capture_factory_windows.cc', 'windows/video_capture_mf.cc', 'windows/video_capture_mf.h', + 'windows/BasePin.cpp', + 'windows/BaseFilter.cpp', + 'windows/BaseInputPin.cpp', + 'windows/MediaType.cpp', ], 'link_settings': { 'libraries': [ @@ -162,7 +175,7 @@ 'test/video_capture_main_mac.mm', ], 'conditions': [ - ['OS=="mac" or OS=="linux"', { + ['OS!="win" and OS!="android"', { 'cflags': [ '-Wno-write-strings', ], @@ -170,11 +183,15 @@ '-lpthread -lm', ], }], + ['include_v4l2_video_capture==1', { + 'libraries': [ + '-lXext', + '-lX11', + ], + }], ['OS=="linux"', { 'libraries': [ '-lrt', - '-lXext', - '-lX11', ], }], ['OS=="mac"', { diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc b/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc index 11e1f41603b..046115de038 100644 --- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc +++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc @@ -17,7 +17,6 @@ #include "webrtc/system_wrappers/interface/trace.h" #include -#include namespace webrtc { @@ -42,6 +41,23 @@ const DelayValues WindowsCaptureDelays[NoWindowsCaptureDelays] = { }, }; + + void _FreeMediaType(AM_MEDIA_TYPE& mt) +{ + if (mt.cbFormat != 0) + { + CoTaskMemFree((PVOID)mt.pbFormat); + mt.cbFormat = 0; + mt.pbFormat = NULL; + } + if (mt.pUnk != NULL) + { + // pUnk should not be used. 
+        mt.pUnk->Release();
+        mt.pUnk = NULL;
+    }
+}
+
 // static
 DeviceInfoDS* DeviceInfoDS::Create(const int32_t id)
 {
@@ -565,7 +581,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
         if (hrVC == S_OK)
         {
-            LONGLONG *frameDurationList;
+            LONGLONG *frameDurationList = NULL;
             LONGLONG maxFPS;
             long listSize;
             SIZE size;
@@ -584,7 +600,9 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
             // On some odd cameras, you may get a 0 for duration.
             // GetMaxOfFrameArray returns the lowest duration (highest FPS)
-            if (hrVC == S_OK && listSize > 0 &&
+            // Initialize and check the returned list for null since
+            // some broken drivers don't modify it.
+            if (hrVC == S_OK && listSize > 0 && frameDurationList &&
                 0 != (maxFPS = GetMaxOfFrameArray(frameDurationList,
                                                   listSize)))
             {
@@ -679,7 +697,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
                          capability->width, capability->height,
                          capability->rawType, capability->maxFPS);
         }
-        DeleteMediaType(pmt);
+        _FreeMediaType(*pmt);
         pmt = NULL;
     }
     RELEASE_AND_CLEAR(streamConfig);
diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc b/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc
index 144f8833b75..d305f5d3a74 100644
--- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc
@@ -21,6 +21,9 @@ DEFINE_GUID(CLSID_SINKFILTER, 0x88cdbbdc, 0xa73b, 0x4afa,
 0xac, 0xbf, 0x15, 0xd5, 0xe2, 0xce, 0x12, 0xc3);
 
+using namespace mozilla::media;
+using namespace mozilla;
+
 namespace webrtc
 {
 namespace videocapturemodule
@@ -37,10 +40,10 @@ typedef struct tagTHREADNAME_INFO
 CaptureInputPin::CaptureInputPin (int32_t moduleId,
                             IN TCHAR * szName,
                             IN CaptureSinkFilter* pFilter,
-                            IN CCritSec * pLock,
+                            IN CriticalSection * pLock,
                             OUT HRESULT * pHr,
                             IN LPCWSTR pszName)
-    : CBaseInputPin (szName, pFilter, pLock, pHr, pszName),
+    : BaseInputPin (szName, pFilter, pLock, pHr, pszName),
       _requestedCapability(),
       _resultingCapability()
 {
@@ -53,7 +56,7 @@ CaptureInputPin::~CaptureInputPin()
 }
 
 HRESULT
-CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
+CaptureInputPin::GetMediaType (IN int iPosition, OUT MediaType * pmt)
 {
     // reset the thread handle
     _threadHandle = NULL;
@@ -161,7 +164,7 @@ CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
 }
 
 HRESULT
-CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
+CaptureInputPin::CheckMediaType ( IN const MediaType * pMediaType)
 {
     // reset the thread handle
     _threadHandle = NULL;
@@ -319,8 +322,8 @@ CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
 {
     HRESULT hr = S_OK;
 
-    ASSERT (m_pFilter);
-    ASSERT (pIMediaSample);
+    assert (mFilter);
+    assert (pIMediaSample);
 
     // get the thread handle of the delivering thread inc its priority
     if( _threadHandle == NULL)
     {
@@ -348,8 +351,8 @@ CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
     }
 
-    reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->LockReceive();
-    hr = CBaseInputPin::Receive (pIMediaSample);
+    reinterpret_cast <CaptureSinkFilter *> (mFilter)->LockReceive();
+    hr = BaseInputPin::Receive (pIMediaSample);
 
     if (SUCCEEDED (hr))
     {
@@ -358,17 +361,17 @@ CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
         unsigned char* pBuffer = NULL;
         if(S_OK != pIMediaSample->GetPointer(&pBuffer))
         {
-            reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->UnlockReceive();
+            reinterpret_cast <CaptureSinkFilter *> (mFilter)->UnlockReceive();
             return S_FALSE;
         }
 
         // NOTE: filter unlocked within Send call
-        reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->ProcessCapturedFrame(
+        reinterpret_cast <CaptureSinkFilter *> (mFilter)->ProcessCapturedFrame(
                                         pBuffer,length,_resultingCapability);
     }
     else
     {
-        reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->UnlockReceive();
+        reinterpret_cast <CaptureSinkFilter *> (mFilter)->UnlockReceive();
     }
 
     return hr;
@@ -389,13 +392,15 @@ CaptureSinkFilter::CaptureSinkFilter (IN TCHAR * tszName,
                                       OUT HRESULT * phr,
                                       VideoCaptureExternal& captureObserver,
                                       int32_t moduleId)
-    : CBaseFilter(tszName,punk,& m_crtFilter,CLSID_SINKFILTER),
+    : BaseFilter(tszName, CLSID_SINKFILTER),
+      m_crtFilter("CaptureSinkFilter::m_crtFilter"),
+      m_crtRecv("CaptureSinkFilter::m_crtRecv"),
       m_pInput(NULL),
       _captureObserver(captureObserver),
       _moduleId(moduleId)
 {
     (* phr) = S_OK;
-    m_pInput = new CaptureInputPin(moduleId,NAME ("VideoCaptureInputPin"),
+    m_pInput = new CaptureInputPin(moduleId, L"VideoCaptureInputPin",
                                    this,
                                    & m_crtFilter,
                                    phr, L"VideoCapture");
@@ -418,10 +423,10 @@ int CaptureSinkFilter::GetPinCount()
     return 1;
 }
 
-CBasePin *
+BasePin *
 CaptureSinkFilter::GetPin(IN int Index)
 {
-    CBasePin * pPin;
+    BasePin * pPin;
     LockFilter ();
     if (Index == 0)
     {
@@ -438,22 +443,22 @@ CaptureSinkFilter::GetPin(IN int Index)
 STDMETHODIMP CaptureSinkFilter::Pause()
 {
     LockFilter();
-    if (m_State == State_Stopped)
+    if (mState == State_Stopped)
     {
         // change the state, THEN activate the input pin
-        m_State = State_Paused;
+        mState = State_Paused;
         if (m_pInput && m_pInput->IsConnected())
         {
             m_pInput->Active();
         }
         if (m_pInput && !m_pInput->IsConnected())
         {
-            m_State = State_Running;
+            mState = State_Running;
         }
     }
-    else if (m_State == State_Running)
+    else if (mState == State_Running)
     {
-        m_State = State_Paused;
+        mState = State_Paused;
     }
     UnlockFilter();
     return S_OK;
@@ -465,7 +470,7 @@ STDMETHODIMP CaptureSinkFilter::Stop()
 {
     LockFilter();
 
     // set the state
-    m_State = State_Stopped;
+    mState = State_Stopped;
 
     // inactivate the pins
     if (m_pInput)
@@ -479,7 +484,7 @@ STDMETHODIMP CaptureSinkFilter::Stop()
 void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph)
 {
     LockFilter();
-    m_pGraph = graph;
+    mGraph = graph;
     UnlockFilter();
 }
 
@@ -488,7 +493,7 @@ void CaptureSinkFilter::ProcessCapturedFrame(unsigned char* pBuffer,
                                          const VideoCaptureCapability& frameInfo)
 {
     // we have the receiver lock
-    if (m_State == State_Running)
+    if (mState == State_Running)
     {
         _captureObserver.IncomingFrame(pBuffer, length, frameInfo);
diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h b/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h
index d1f0ed3eb80..f673208110a 100644
--- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h
@@ -11,9 +11,10 @@
 #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
 #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
 
-#include // Include base DS filter header files
-
 #include "webrtc/modules/video_capture/include/video_capture_defines.h"
+#include "BaseInputPin.h"
+#include "BaseFilter.h"
+#include "MediaType.h"
 
 namespace webrtc
 {
@@ -26,7 +27,7 @@ class CaptureSinkFilter;
 *  input pin for camera input
 *
 */
-class CaptureInputPin: public CBaseInputPin
+class CaptureInputPin: public mozilla::media::BaseInputPin
 {
 public:
     int32_t _moduleId;
@@ -38,18 +39,18 @@ public:
     CaptureInputPin(int32_t moduleId,
                     IN TCHAR* szName,
                     IN CaptureSinkFilter* pFilter,
-                    IN CCritSec * pLock,
+                    IN mozilla::CriticalSection * pLock,
                     OUT HRESULT * pHr,
                     IN LPCWSTR pszName);
     virtual ~CaptureInputPin();
 
-    HRESULT GetMediaType (IN int iPos, OUT CMediaType * pmt);
-    HRESULT CheckMediaType (IN const CMediaType * pmt);
+    HRESULT GetMediaType (IN int iPos, OUT mozilla::media::MediaType * pmt);
+    HRESULT CheckMediaType (IN const mozilla::media::MediaType * pmt);
     STDMETHODIMP Receive (IN IMediaSample *);
     HRESULT SetMatchingMediaType(const VideoCaptureCapability& capability);
 };
 
-class CaptureSinkFilter: public CBaseFilter
+class CaptureSinkFilter: public mozilla::media::BaseFilter
 {
 public:
@@ -66,34 +67,55 @@ public:
     void ProcessCapturedFrame(unsigned char* pBuffer, int32_t length,
                               const VideoCaptureCapability& frameInfo);
     // explicit receiver lock acquisition and release
-    void LockReceive() { m_crtRecv.Lock();}
-    void UnlockReceive() {m_crtRecv.Unlock();}
+    void LockReceive() { m_crtRecv.Enter();}
+    void UnlockReceive() {m_crtRecv.Leave();}
+
     // explicit filter lock acquisition and release
-    void LockFilter() {m_crtFilter.Lock();}
-    void UnlockFilter() { m_crtFilter.Unlock(); }
+    void LockFilter() {m_crtFilter.Enter();}
+    void UnlockFilter() { m_crtFilter.Leave(); }
     void SetFilterGraph(IGraphBuilder* graph); // Used if EVR
 
     // --------------------------------------------------------------------
     // COM interfaces
-DECLARE_IUNKNOWN ;
+    STDMETHODIMP QueryInterface(REFIID aIId, void **aInterface)
+    {
+        return mozilla::media::BaseFilter::QueryInterface(aIId, aInterface);
+    }
+
+    STDMETHODIMP_(ULONG) AddRef()
+    {
+        return ::InterlockedIncrement(&mRefCnt);
+    }
+
+    STDMETHODIMP_(ULONG) Release()
+    {
+        unsigned long newRefCnt = ::InterlockedDecrement(&mRefCnt);
+
+        if (!newRefCnt) {
+            delete this;
+        }
+
+        return newRefCnt;
+    }
+
     STDMETHODIMP SetMatchingMediaType(const VideoCaptureCapability& capability);
 
     // --------------------------------------------------------------------
     // CBaseFilter methods
     int GetPinCount ();
-    CBasePin * GetPin ( IN int Index);
+    mozilla::media::BasePin * GetPin ( IN int Index);
     STDMETHODIMP Pause ();
     STDMETHODIMP Stop ();
     STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
     // --------------------------------------------------------------------
     // class factory calls this
-    static CUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
+    static IUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
 private:
-    CCritSec m_crtFilter; // filter lock
-    CCritSec m_crtRecv;  // receiver lock; always acquire before filter lock
+    mozilla::CriticalSection m_crtFilter; // filter lock
+    mozilla::CriticalSection m_crtRecv;  // receiver lock; always acquire before filter lock
     CaptureInputPin * m_pInput;
     VideoCaptureExternal& _captureObserver;
     int32_t _moduleId;
+    unsigned long mRefCnt;
 };
 } // namespace videocapturemodule
 } // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
index 972b3818269..97295a59d0f 100644
--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
@@ -30,6 +30,12 @@
       'dependencies': [
         '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
       ],
+    },{
+      'link_settings': {
+        'libraries': [
+          '$(LIBVPX_OBJ)/libvpx.a',
+        ],
+      },
     }],
   ],
   'direct_dependent_settings': {
diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc
index 1d911a54e44..44a3e04711c 100644
--- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc
+++ 
b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc @@ -56,9 +56,9 @@ VCMTimestampExtrapolator::Reset() _firstTimestamp = 0; _w[0] = 90.0; _w[1] = 0; - _P[0][0] = 1; - _P[1][1] = _P11; - _P[0][1] = _P[1][0] = 0; + _pp[0][0] = 1; + _pp[1][1] = _P11; + _pp[0][1] = _pp[1][0] = 0; _firstAfterReset = true; _prevUnwrappedTimestamp = -1; _prevWrapTimestamp = -1; @@ -121,14 +121,14 @@ VCMTimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz, bool trace) // A sudden change of average network delay has been detected. // Force the filter to adjust its offset parameter by changing // the offset uncertainty. Don't do this during startup. - _P[1][1] = _P11; + _pp[1][1] = _P11; } //T = [t(k) 1]'; //that = T'*w; //K = P*T/(lambda + T'*P*T); double K[2]; - K[0] = _P[0][0] * tMs + _P[0][1]; - K[1] = _P[1][0] * tMs + _P[1][1]; + K[0] = _pp[0][0] * tMs + _pp[0][1]; + K[1] = _pp[1][0] * tMs + _pp[1][1]; double TPT = _lambda + tMs * K[0] + K[1]; K[0] /= TPT; K[1] /= TPT; @@ -136,12 +136,12 @@ VCMTimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz, bool trace) _w[0] = _w[0] + K[0] * residual; _w[1] = _w[1] + K[1] * residual; //P = 1/lambda*(P - K*T'*P); - double p00 = 1 / _lambda * (_P[0][0] - (K[0] * tMs * _P[0][0] + K[0] * _P[1][0])); - double p01 = 1 / _lambda * (_P[0][1] - (K[0] * tMs * _P[0][1] + K[0] * _P[1][1])); - _P[1][0] = 1 / _lambda * (_P[1][0] - (K[1] * tMs * _P[0][0] + K[1] * _P[1][0])); - _P[1][1] = 1 / _lambda * (_P[1][1] - (K[1] * tMs * _P[0][1] + K[1] * _P[1][1])); - _P[0][0] = p00; - _P[0][1] = p01; + double p00 = 1 / _lambda * (_pp[0][0] - (K[0] * tMs * _pp[0][0] + K[0] * _pp[1][0])); + double p01 = 1 / _lambda * (_pp[0][1] - (K[0] * tMs * _pp[0][1] + K[0] * _pp[1][1])); + _pp[1][0] = 1 / _lambda * (_pp[1][0] - (K[1] * tMs * _pp[0][0] + K[1] * _pp[1][0])); + _pp[1][1] = 1 / _lambda * (_pp[1][1] - (K[1] * tMs * _pp[0][1] + K[1] * _pp[1][1])); + _pp[0][0] = p00; + _pp[0][1] = p01; _prevUnwrappedTimestamp = unwrapped_ts90khz; if (_packetCount < _startUpFilterDelayInPackets) { diff --git a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h index 4565186a353..f8eda2c8c58 100644 --- a/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h +++ b/media/webrtc/trunk/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h @@ -38,7 +38,7 @@ private: int32_t _id; Clock* _clock; double _w[2]; - double _P[2][2]; + double _pp[2][2]; int64_t _startMs; int64_t _prevMs; uint32_t _firstTimestamp; diff --git a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi index cb7dd6fb5c3..f576b4d07f3 100644 --- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi +++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi @@ -73,6 +73,7 @@ 'conditions': [ ['os_posix==1 and OS!="mac"', { 'cflags': [ '-msse2', ], + 'cflags_mozilla': [ '-msse2', ], }], ['OS=="mac"', { 'xcode_settings': { diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h index c432ec0e3f7..ad44072de39 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h +++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h @@ -11,7 +11,7 @@ #ifndef 
WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_ #define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_ -#if defined(__linux__) && defined(__ELF__) +#if (defined(__linux__) || defined(__FreeBSD__)) && defined(__ELF__) .section .note.GNU-stack,"",%progbits #endif diff --git a/media/webrtc/trunk/webrtc/system_wrappers/interface/tick_util.h b/media/webrtc/trunk/webrtc/system_wrappers/interface/tick_util.h index 0b7890e7c89..5b320550065 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/interface/tick_util.h +++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/tick_util.h @@ -173,7 +173,7 @@ inline int64_t TickTime::MillisecondTimestamp() { #else return ticks; #endif -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) return ticks / 1000000LL; #else return ticks / 1000LL; @@ -190,7 +190,7 @@ inline int64_t TickTime::MicrosecondTimestamp() { #else return ticks * 1000LL; #endif -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) return ticks / 1000LL; #else return ticks; @@ -210,7 +210,7 @@ inline int64_t TickTime::MillisecondsToTicks(const int64_t ms) { #else return ms; #endif -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) return ms * 1000000LL; #else return ms * 1000LL; @@ -226,7 +226,7 @@ inline int64_t TickTime::TicksToMilliseconds(const int64_t ticks) { #else return ticks; #endif -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) return ticks / 1000000LL; #else return ticks / 1000LL; @@ -255,7 +255,7 @@ inline int64_t TickInterval::Milliseconds() const { // interval_ is in ms return interval_; #endif -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) // interval_ is in ns return interval_ / 1000000; #else @@ -274,7 +274,7 @@ inline int64_t TickInterval::Microseconds() const { // interval_ is in ms return interval_ * 1000LL; #endif -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) // interval_ is in ns return interval_ / 1000; #else diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_posix.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_posix.cc index e6a491c59a6..3c2bd8508a5 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_posix.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_posix.cc @@ -12,7 +12,6 @@ #include #include -#include #include "webrtc/common_types.h" diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable.cc index 577949d3291..d57542cd2de 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable.cc @@ -14,7 +14,7 @@ #include #include "webrtc/system_wrappers/source/condition_variable_event_win.h" #include "webrtc/system_wrappers/source/condition_variable_native_win.h" -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) #include #include "webrtc/system_wrappers/source/condition_variable_posix.h" #endif @@ -31,7 +31,7 @@ ConditionVariableWrapper* 
ConditionVariableWrapper::CreateConditionVariable() { ret_val = new ConditionVariableEventWin(); } return ret_val; -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) return ConditionVariablePosix::Create(); #else return NULL; diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_posix.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_posix.cc index b21304245cd..e2def946b6f 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_posix.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_posix.cc @@ -79,7 +79,7 @@ bool ConditionVariablePosix::SleepCS(CriticalSectionWrapper& crit_sect, unsigned long max_time_inMS) { const unsigned long INFINITE = 0xFFFFFFFF; const int MILLISECONDS_PER_SECOND = 1000; -#ifndef WEBRTC_LINUX +#if !defined(WEBRTC_LINUX) && !defined(WEBRTC_BSD) const int MICROSECONDS_PER_MILLISECOND = 1000; #endif const int NANOSECONDS_PER_SECOND = 1000000000; diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_info.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_info.cc index c482d879cfb..16bf20b56f3 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_info.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_info.cc @@ -12,10 +12,12 @@ #if defined(_WIN32) #include -#elif defined(WEBRTC_MAC) -#include +#elif defined(WEBRTC_BSD) || defined(WEBRTC_MAC) #include -#else // defined(WEBRTC_LINUX) or defined(WEBRTC_ANDROID) +#include +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) +#include +#else // defined(_SC_NPROCESSORS_ONLN) #include #endif @@ -34,13 +36,20 @@ uint32_t CpuInfo::DetectNumberOfCores() { WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1, "Available number of cores:%d", number_of_cores_); -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) +#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) && !defined(WEBRTC_GONK) number_of_cores_ = static_cast(sysconf(_SC_NPROCESSORS_ONLN)); WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1, "Available number of cores:%d", number_of_cores_); -#elif defined(WEBRTC_MAC) - int name[] = {CTL_HW, HW_AVAILCPU}; +#elif defined(WEBRTC_BSD) || defined(WEBRTC_MAC) + int name[] = { + CTL_HW, +#ifdef HW_AVAILCPU + HW_AVAILCPU, +#else + HW_NCPU, +#endif + }; int ncpu; size_t size = sizeof(ncpu); if (0 == sysctl(name, 2, &ncpu, &size, NULL, 0)) { @@ -52,6 +61,8 @@ uint32_t CpuInfo::DetectNumberOfCores() { "Failed to get number of cores"); number_of_cores_ = 1; } +#elif defined(_SC_NPROCESSORS_ONLN) + number_of_cores_ = sysconf(_SC_NPROCESSORS_ONLN); #else WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1, "No function to get number of cores"); diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock.cc index 8b76eb86147..02296b6d1f5 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock.cc @@ -15,6 +15,8 @@ #if defined(_WIN32) #include "webrtc/system_wrappers/source/rw_lock_generic.h" #include "webrtc/system_wrappers/source/rw_lock_win.h" +#elif defined(ANDROID) +#include "webrtc/system_wrappers/source/rw_lock_generic.h" #else #include "webrtc/system_wrappers/source/rw_lock_posix.h" #endif @@ -29,6 +31,9 @@ RWLockWrapper* RWLockWrapper::CreateRWLock() { return lock; } return new RWLockGeneric(); +#elif defined(ANDROID) + // Android 2.2 and before do not have POSIX 
pthread rwlocks. + return new RWLockGeneric(); #else return RWLockPosix::Create(); #endif diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp index 4f6d083383b..3580b68a0bb 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp @@ -130,7 +130,12 @@ 'trace_win.h', ], }], - ['OS=="android"', { + ['enable_lazy_trace_alloc==0', { + 'defines': [ + 'WEBRTC_LAZY_TRACE_ALLOC', + ], + }], + ['OS=="android" or moz_widget_toolkit_gonk==1', { 'defines': [ 'WEBRTC_THREAD_RR', # TODO(leozwang): Investigate CLOCK_REALTIME and CLOCK_MONOTONIC @@ -140,6 +145,11 @@ 'WEBRTC_CLOCK_TYPE_REALTIME', ], 'dependencies': [ 'cpu_features_android', ], + 'sources!': [ + # Android doesn't have these in <=2.2 + 'rw_lock_posix.cc', + 'rw_lock_posix.h', + ], }], ['OS=="linux"', { 'defines': [ @@ -194,7 +204,7 @@ }, ], # targets 'conditions': [ - ['OS=="android"', { + ['OS=="android" or moz_widget_toolkit_gonk==1', { 'targets': [ { 'variables': { diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/thread_posix.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/thread_posix.cc index 8a24ae58229..b59016955eb 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/thread_posix.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/thread_posix.cc @@ -58,6 +58,17 @@ #include #endif +#if defined(__NetBSD__) +#include +#elif defined(__FreeBSD__) +#include +#include +#endif + +#if defined(WEBRTC_BSD) && !defined(__NetBSD__) +#include +#endif + #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/sleep.h" @@ -123,7 +134,7 @@ ThreadPosix::ThreadPosix(ThreadRunFunction func, ThreadObj obj, event_(EventWrapper::Create()), name_(), set_thread_name_(false), -#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID)) +#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK)) pid_(-1), #endif attr_(), @@ -136,10 +147,24 @@ ThreadPosix::ThreadPosix(ThreadRunFunction func, ThreadObj obj, } uint32_t ThreadWrapper::GetThreadId() { -#if defined(WEBRTC_ANDROID) || defined(WEBRTC_LINUX) +#if defined(WEBRTC_ANDROID) || defined(WEBRTC_LINUX) || defined(WEBRTC_GONK) return static_cast(syscall(__NR_gettid)); #elif defined(WEBRTC_MAC) || defined(WEBRTC_IOS) return pthread_mach_thread_np(pthread_self()); +#elif defined(__NetBSD__) + return _lwp_self(); +#elif defined(__DragonFly__) + return lwp_gettid(); +#elif defined(__OpenBSD__) + return reinterpret_cast (pthread_self()); +#elif defined(__FreeBSD__) +# if __FreeBSD_version > 900030 + return pthread_getthreadid_np(); +# else + long lwpid; + thr_self(&lwpid); + return lwpid; +# endif #else return reinterpret_cast(pthread_self()); #endif @@ -147,7 +172,7 @@ uint32_t ThreadWrapper::GetThreadId() { int ThreadPosix::Construct() { int result = 0; -#if !defined(WEBRTC_ANDROID) +#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_GONK) // Enable immediate cancellation if requested, see Shutdown(). 
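The RWLockGeneric fallback above exists because pre-2.3 Android pthreads lack rwlocks, and a workable substitute can be built from a mutex and a condition variable. A compact sketch of that construction; the method names follow RWLockWrapper's style, but this is not the file's actual implementation and it makes no writer-fairness promises:

#include <condition_variable>
#include <mutex>

// Minimal readers-writer lock on top of mutex + condvar, the same
// primitives a generic rwlock falls back to.
class GenericRWLock {
 public:
  void AcquireLockShared() {
    std::unique_lock<std::mutex> lock(mutex_);
    cond_.wait(lock, [this] { return !writer_active_; });
    ++readers_;
  }
  void ReleaseLockShared() {
    std::lock_guard<std::mutex> lock(mutex_);
    if (--readers_ == 0) cond_.notify_all();  // wake a waiting writer
  }
  void AcquireLockExclusive() {
    std::unique_lock<std::mutex> lock(mutex_);
    cond_.wait(lock, [this] { return !writer_active_ && readers_ == 0; });
    writer_active_ = true;
  }
  void ReleaseLockExclusive() {
    std::lock_guard<std::mutex> lock(mutex_);
    writer_active_ = false;
    cond_.notify_all();  // wake readers and writers alike
  }
 private:
  std::mutex mutex_;
  std::condition_variable cond_;
  int readers_ = 0;
  bool writer_active_ = false;
};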
     result = pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, NULL);
     if (result != 0) {
@@ -171,18 +196,28 @@ ThreadPosix::~ThreadPosix() {
   delete crit_state_;
 }
 
-#define HAS_THREAD_ID !defined(WEBRTC_IOS) && !defined(WEBRTC_MAC)
+#define HAS_THREAD_ID !defined(WEBRTC_IOS) && !defined(WEBRTC_MAC) && !defined(WEBRTC_BSD)
 
 bool ThreadPosix::Start(unsigned int& thread_id)
 {
   int result = pthread_attr_setdetachstate(&attr_, PTHREAD_CREATE_DETACHED);
   // Set the stack size to 1M.
   result |= pthread_attr_setstacksize(&attr_, 1024 * 1024);
+#if 0
+// Temporarily remove the attempt to set this to real-time scheduling.
+//
+// See: https://code.google.com/p/webrtc/issues/detail?id=1956
+//
+// To be removed when upstream is fixed.
 #ifdef WEBRTC_THREAD_RR
   const int policy = SCHED_RR;
 #else
   const int policy = SCHED_FIFO;
 #endif
+#else
+  const int policy = SCHED_OTHER;
+#endif
+
   event_->Reset();
   // If pthread_create was successful, a thread was created and is running.
   // Don't return false if it was successful since if there are any other
@@ -235,13 +270,17 @@ bool ThreadPosix::Start(unsigned int& thread_id)
 
 // CPU_ZERO and CPU_SET are not available in NDK r7, so disable
 // SetAffinity on Android for now.
-#if (defined(WEBRTC_LINUX) && (!defined(WEBRTC_ANDROID)))
+#if defined(__FreeBSD__) || (defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) && !defined(WEBRTC_GONK))
 bool ThreadPosix::SetAffinity(const int* processor_numbers,
                               const unsigned int amount_of_processors) {
   if (!processor_numbers || (amount_of_processors == 0)) {
     return false;
   }
+#if defined(__FreeBSD__)
+  cpuset_t mask;
+#else
   cpu_set_t mask;
+#endif
   CPU_ZERO(&mask);
 
   for (unsigned int processor = 0;
@@ -249,7 +288,11 @@ bool ThreadPosix::SetAffinity(const int* processor_numbers,
        ++processor) {
     CPU_SET(processor_numbers[processor], &mask);
   }
-#if defined(WEBRTC_ANDROID)
+#if defined(__FreeBSD__)
+  const int result = pthread_setaffinity_np(thread_,
+                                            sizeof(mask),
+                                            &mask);
+#elif defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK)
   // Android.
   const int result = syscall(__NR_sched_setaffinity,
                              pid_,
@@ -310,7 +353,7 @@ void ThreadPosix::Run() {
     CriticalSectionScoped cs(crit_state_);
     alive_ = true;
   }
-#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID))
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK))
   pid_ = GetThreadId();
 #endif
   // The event the Start() is waiting for.
@@ -319,6 +362,10 @@ void ThreadPosix::Run() {
   if (set_thread_name_) {
 #ifdef WEBRTC_LINUX
     prctl(PR_SET_NAME, (unsigned long)name_, 0, 0, 0);
+#elif defined(__NetBSD__)
+    pthread_setname_np(pthread_self(), "%s", (void *)name_);
+#elif defined(WEBRTC_BSD)
+    pthread_set_name_np(pthread_self(), name_);
 #endif
     WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
                  "Thread with name:%s started ", name_);
diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/tick_util.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/tick_util.cc
index 8895b9172d1..4b5f71aa374 100644
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/tick_util.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/tick_util.cc
@@ -58,7 +58,7 @@ int64_t TickTime::QueryOsForTicks() {
     }
     result.ticks_ = now + (num_wrap_time_get_time << 32);
 #endif
-#elif defined(WEBRTC_LINUX)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
     struct timespec ts;
     // TODO(wu): Remove CLOCK_REALTIME implementation.
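The POSIX branches of tick_util store ticks as nanoseconds from clock_gettime, which is why the WEBRTC_LINUX/WEBRTC_BSD conversions in tick_util.h divide by 1000000LL and 1000LL. A POSIX-only sketch of that tick source, assuming CLOCK_MONOTONIC (the CLOCK_REALTIME variant is the TODO above):

#include <stdint.h>
#include <stdio.h>
#include <time.h>

// Nanosecond ticks as the WEBRTC_LINUX/WEBRTC_BSD branches store them;
// MillisecondTimestamp() then divides by 1000000LL.
static int64_t QueryMonotonicTicksNs() {
  struct timespec ts;
  clock_gettime(CLOCK_MONOTONIC, &ts);
  return static_cast<int64_t>(ts.tv_sec) * 1000000000LL + ts.tv_nsec;
}

int main() {
  int64_t ticks = QueryMonotonicTicksNs();
  printf("now: %lld ms\n",
         static_cast<long long>(ticks / 1000000LL));  // ns -> ms
  return 0;
}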
#ifdef WEBRTC_CLOCK_TYPE_REALTIME diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc index a3d53f3bb50..38018d4c0ab 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc @@ -485,13 +485,13 @@ void TraceImpl::AddMessageToList( length_[active_queue_][idx] = length; memcpy(message_queue_[active_queue_][idx], trace_message, length); - if (next_free_idx_[active_queue_] == WEBRTC_TRACE_MAX_QUEUE - 1) { + if (next_free_idx_[active_queue_] >= WEBRTC_TRACE_MAX_QUEUE - 1) { // Logging more messages than can be worked off. Log a warning. const char warning_msg[] = "WARNING MISSING TRACE MESSAGES\n"; - level_[active_queue_][next_free_idx_[active_queue_]] = kTraceWarning; - length_[active_queue_][next_free_idx_[active_queue_]] = strlen(warning_msg); - memcpy(message_queue_[active_queue_][next_free_idx_[active_queue_]], - warning_msg, strlen(warning_msg)); + level_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1] = kTraceWarning; + length_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1] = strlen(warning_msg); + memcpy(message_queue_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1], + warning_msg, length_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1]); next_free_idx_[active_queue_]++; } } diff --git a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc index 22db3fa51af..bfce2af8f6b 100644 --- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc +++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc @@ -19,8 +19,6 @@ #ifdef WEBRTC_ANDROID #include -#else -#include #endif #if defined(_DEBUG) @@ -58,7 +56,7 @@ int32_t TracePosix::AddTime(char* trace_message, const TraceLevel level) const { } struct tm buffer; const struct tm* system_time = - localtime_r(&system_time_high_res.tv_sec, &buffer); + localtime_r((const time_t *)(&system_time_high_res.tv_sec), &buffer); const uint32_t ms_time = system_time_high_res.tv_usec / 1000; uint32_t prev_tickCount = 0; diff --git a/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_impl.cc b/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_impl.cc index c0e897e1bde..8e5da5ba1f1 100644 --- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_impl.cc +++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_impl.cc @@ -18,16 +18,16 @@ #if defined(_WIN32) #include #include -#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) #include #include #include #include #include +#include #include #include #include -#include #include #include #ifndef WEBRTC_IOS @@ -36,9 +36,11 @@ #endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) #if defined(WEBRTC_MAC) -#include #include #endif +#if defined(WEBRTC_BSD) || defined(WEBRTC_MAC) +#include +#endif #if defined(WEBRTC_LINUX) #include #include @@ -51,7 +53,7 @@ #include "webrtc/test/channel_transport/udp_socket_manager_wrapper.h" #include "webrtc/typedefs.h" -#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) #define GetLastError() errno #define IFRSIZE ((int)(size * sizeof (struct ifreq))) @@ -61,7 +63,7 @@ (int)(nlh)->nlmsg_len >= (int)sizeof(struct nlmsghdr) && \ (int)(nlh)->nlmsg_len <= (len)) -#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#endif // defined(WEBRTC_LINUX) || 
defined(WEBRTC_BSD) || defined(WEBRTC_MAC) namespace webrtc { namespace test { @@ -2330,7 +2332,7 @@ int32_t UdpTransport::InetPresentationToNumeric(int32_t af, const char* src, void* dst) { -#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) const int32_t result = inet_pton(af, src, dst); return result > 0 ? 0 : -1; @@ -2452,7 +2454,7 @@ int32_t UdpTransport::LocalHostAddressIPV6(char n_localIP[16]) "getaddrinfo failed to find address"); return -1; -#elif defined(WEBRTC_MAC) +#elif defined(WEBRTC_BSD) || defined(WEBRTC_MAC) struct ifaddrs* ptrIfAddrs = NULL; struct ifaddrs* ptrIfAddrsStart = NULL; @@ -2644,7 +2646,7 @@ int32_t UdpTransport::LocalHostAddress(uint32_t& localIP) "gethostbyname failed, error:%d", error); return -1; } -#elif (defined(WEBRTC_MAC)) +#elif (defined(WEBRTC_BSD) || defined(WEBRTC_MAC)) char localname[255]; if (gethostname(localname, 255) != -1) { @@ -2783,7 +2785,7 @@ int32_t UdpTransport::IPAddress(const SocketAddress& address, sourcePort = htons(source_port); return 0; - #elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC) + #elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC) int32_t ipFamily = address._sockaddr_storage.sin_family; const void* ptrNumericIP = NULL; diff --git a/media/webrtc/trunk/webrtc/typedefs.h b/media/webrtc/trunk/webrtc/typedefs.h index 37c8fc9ce3d..22edf454fa3 100644 --- a/media/webrtc/trunk/webrtc/typedefs.h +++ b/media/webrtc/trunk/webrtc/typedefs.h @@ -17,7 +17,7 @@ // For access to standard POSIXish features, use WEBRTC_POSIX instead of a // more specific macro. #if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || \ - defined(WEBRTC_ANDROID) + defined(WEBRTC_ANDROID) || defined(WEBRTC_BSD) #define WEBRTC_POSIX #endif @@ -49,10 +49,70 @@ #define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN #define WEBRTC_LITTLE_ENDIAN -#elif defined(__MIPSEL__) -#define WEBRTC_ARCH_32_BITS +#elif defined(__powerpc64__) +#define WEBRTC_ARCH_PPC64 1 +#define WEBRTC_ARCH_64_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#elif defined(__ppc__) || defined(__powerpc__) +#define WEBRTC_ARCH_PPC 1 +#define WEBRTC_ARCH_32_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#elif defined(__sparc64__) +#define WEBRTC_ARCH_SPARC 1 +#define WEBRTC_ARCH_64_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#elif defined(__sparc__) +#define WEBRTC_ARCH_SPARC 1 +#define WEBRTC_ARCH_32_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#elif defined(__mips__) +#define WEBRTC_ARCH_MIPS 1 +#if defined(_ABI64) && _MIPS_SIM == _ABI64 +#define WEBRTC_ARCH_64_BITS 1 +#else +#define WEBRTC_ARCH_32_BITS 1 +#endif +#if defined(__MIPSEB__) +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#else #define WEBRTC_ARCH_LITTLE_ENDIAN #define WEBRTC_LITTLE_ENDIAN +#endif +#elif defined(__hppa__) +#define WEBRTC_ARCH_HPPA 1 +#define WEBRTC_ARCH_32_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#elif defined(__ia64__) +#define WEBRTC_ARCH_IA64 1 +#define WEBRTC_ARCH_64_BITS 1 +#define WEBRTC_ARCH_LITTLE_ENDIAN +#define WEBRTC_LITTLE_ENDIAN +#elif defined(__s390x__) +#define WEBRTC_ARCH_S390X 1 +#define WEBRTC_ARCH_64_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#elif defined(__s390__) +#define WEBRTC_ARCH_S390 1 +#define WEBRTC_ARCH_32_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN +#elif defined(__alpha__) +#define WEBRTC_ARCH_ALPHA 1 +#define 
WEBRTC_ARCH_64_BITS 1 +#define WEBRTC_ARCH_LITTLE_ENDIAN +#define WEBRTC_LITTLE_ENDIAN +#elif defined(__avr32__) +#define WEBRTC_ARCH_AVR32 1 +#define WEBRTC_ARCH_32_BITS 1 +#define WEBRTC_ARCH_BIG_ENDIAN +#define WEBRTC_BIG_ENDIAN #else #error Please add support for your architecture in typedefs.h #endif diff --git a/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc b/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc index fcff7820005..91cd7638d02 100644 --- a/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc +++ b/media/webrtc/trunk/webrtc/video_engine/stream_synchronization.cc @@ -12,6 +12,7 @@ #include #include +#include #include diff --git a/media/webrtc/trunk/webrtc/video_engine/vie_channel.cc b/media/webrtc/trunk/webrtc/video_engine/vie_channel.cc index e80cb4b26ca..1a6bd58a602 100644 --- a/media/webrtc/trunk/webrtc/video_engine/vie_channel.cc +++ b/media/webrtc/trunk/webrtc/video_engine/vie_channel.cc @@ -1402,6 +1402,7 @@ int32_t ViEChannel::StartSend() { rtp_rtcp->SetSendingMediaStatus(true); rtp_rtcp->SetSendingStatus(true); } + vie_receiver_.StartRTCPReceive(); return 0; } @@ -1437,6 +1438,7 @@ int32_t ViEChannel::StopSend() { rtp_rtcp->ResetSendDataCountersRTP(); rtp_rtcp->SetSendingStatus(false); } + vie_receiver_.StopRTCPReceive(); return 0; } diff --git a/media/webrtc/trunk/webrtc/video_engine/vie_impl.cc b/media/webrtc/trunk/webrtc/video_engine/vie_impl.cc index e2763093308..135df758466 100644 --- a/media/webrtc/trunk/webrtc/video_engine/vie_impl.cc +++ b/media/webrtc/trunk/webrtc/video_engine/vie_impl.cc @@ -173,11 +173,13 @@ int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext) { "Could not set capture Android VM"); return -1; } +#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER if (SetRenderAndroidVM(javaVM) != 0) { WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId, "Could not set render Android VM"); return -1; } +#endif return 0; #else WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId, diff --git a/media/webrtc/trunk/webrtc/video_engine/vie_receiver.cc b/media/webrtc/trunk/webrtc/video_engine/vie_receiver.cc index d7ac3c06d41..8b12e0534b0 100644 --- a/media/webrtc/trunk/webrtc/video_engine/vie_receiver.cc +++ b/media/webrtc/trunk/webrtc/video_engine/vie_receiver.cc @@ -49,6 +49,7 @@ ViEReceiver::ViEReceiver(const int32_t channel_id, decryption_buffer_(NULL), rtp_dump_(NULL), receiving_(false), + receiving_rtcp_(false), restored_packet_in_use_(false) { assert(remote_bitrate_estimator); } @@ -387,6 +388,16 @@ void ViEReceiver::StopReceive() { receiving_ = false; } +void ViEReceiver::StartRTCPReceive() { + CriticalSectionScoped cs(receive_cs_.get()); + receiving_rtcp_ = true; +} + +void ViEReceiver::StopRTCPReceive() { + CriticalSectionScoped cs(receive_cs_.get()); + receiving_rtcp_ = false; +} + int ViEReceiver::StartRTPDump(const char file_nameUTF8[1024]) { CriticalSectionScoped cs(receive_cs_.get()); if (rtp_dump_) { diff --git a/media/webrtc/trunk/webrtc/video_engine/vie_receiver.h b/media/webrtc/trunk/webrtc/video_engine/vie_receiver.h index 0fac8ed6fcf..b26723c404f 100644 --- a/media/webrtc/trunk/webrtc/video_engine/vie_receiver.h +++ b/media/webrtc/trunk/webrtc/video_engine/vie_receiver.h @@ -66,6 +66,9 @@ class ViEReceiver : public RtpData { void StartReceive(); void StopReceive(); + void StartRTCPReceive(); + void StopRTCPReceive(); + int StartRTPDump(const char file_nameUTF8[1024]); int StopRTPDump(); @@ -114,6 +117,7 @@ class ViEReceiver : public RtpData { uint8_t* decryption_buffer_; RtpDump* rtp_dump_; bool 
receiving_;
+  bool receiving_rtcp_;
   uint8_t restored_packet_[kViEMaxMtu];
   bool restored_packet_in_use_;
 };
diff --git a/media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h b/media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h
index 6859ddbe4ed..0de90772c9e 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h
@@ -84,6 +84,7 @@ public:
     // receives callbacks for generated trace messages.
     static int SetTraceCallback(TraceCallback* callback);
 
+    static int SetAndroidObjects(void* javaVM, void* context);
     static int SetAndroidObjects(void* javaVM, void* env, void* context);
 
 protected:
diff --git a/media/webrtc/trunk/webrtc/voice_engine/include/voe_external_media.h b/media/webrtc/trunk/webrtc/voice_engine/include/voe_external_media.h
index 1051d66e1db..ea69855468c 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/include/voe_external_media.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/include/voe_external_media.h
@@ -96,11 +96,12 @@ public:
         const int16_t speechData10ms[], int lengthSamples,
         int samplingFreqHz, int current_delay_ms) = 0;
 
+    // This function gets audio for an external playout sink.
     // During transmission, this function should be called every ~10 ms
     // to obtain a new 10 ms frame of audio. The length of the block will
-    // be 160, 320, 440 or 480 samples (for 16, 32, 44 or 48 kHz sampling
-    // rates respectively).
+    // be 160, 320, 440 or 480 samples (for 16000, 32000, 44100 or 48000
+    // Hz sampling rates respectively).
     virtual int ExternalPlayoutGetData(
         int16_t speechData10ms[], int samplingFreqHz,
         int current_delay_ms, int& lengthSamples) = 0;
@@ -108,7 +109,7 @@ public:
     // Pulls an audio frame from the specified |channel| for external mixing.
     // If the |desired_sample_rate_hz| is 0, the signal will be returned with
     // its native frequency, otherwise it will be resampled. Valid frequencies
-    // are 16, 22, 32, 44 or 48 kHz.
+    // are 16000, 22050, 32000, 44100 or 48000 Hz.
     virtual int GetAudioFrame(int channel, int desired_sample_rate_hz,
                               AudioFrame* frame) = 0;
 
diff --git a/media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h b/media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h
index c67d984b564..6aa603bb8dd 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h
@@ -118,8 +118,8 @@ public:
     virtual int GetOutputVolumePan(int channel, float& left, float& right) = 0;
 
 protected:
-    VoEVolumeControl() {};
-    virtual ~VoEVolumeControl() {};
+    VoEVolumeControl() {}
+    virtual ~VoEVolumeControl() {}
 };
 
 } // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/voice_engine/output_mixer_unittest.cc b/media/webrtc/trunk/webrtc/voice_engine/output_mixer_unittest.cc
index 006c45fa1eb..78de8d77baf 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/output_mixer_unittest.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/output_mixer_unittest.cc
@@ -149,12 +149,14 @@ void OutputMixerTest::RunResampleTest(int src_channels,
     SetStereoFrame(&golden_frame_, dst_left, dst_right, dst_sample_rate_hz);
   }
 
-  // The sinc resampler has a known delay, which we compute here. Multiplying by
-  // two gives us a crude maximum for any resampling, as the old resampler
-  // typically (but not always) has lower delay.
-  static const int kInputKernelDelaySamples = 16;
-  const int max_delay = static_cast(dst_sample_rate_hz)
-      / src_sample_rate_hz * kInputKernelDelaySamples * dst_channels * 2;
+  // The speex resampler has a known delay dependent on quality and rates,
+  // which we approximate here. Multiplying by two gives us a crude maximum
+  // for any resampling, as the old resampler typically (but not always)
+  // has lower delay. The actual delay is calculated internally based on the
+  // filter length in the QualityMap.
+  static const int kInputKernelDelaySamples = 16*3;
+  const int max_delay = std::min(1.0f, 1/kResamplingFactor) *
+      kInputKernelDelaySamples * dst_channels * 2;
   printf("(%d, %d Hz) -> (%d, %d Hz) ",  // SNR reported on the same line later.
       src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
   EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler, &dst_frame_));
@@ -167,6 +169,9 @@ void OutputMixerTest::RunResampleTest(int src_channels,
   }
 }
 
+// These two tests assume memcpy() (no delay and no filtering) for input
+// freq == output freq && same channels. RemixAndResample uses 'Fixed'
+// resamplers to enable this behavior.
 TEST_F(OutputMixerTest, RemixAndResampleCopyFrameSucceeds) {
   // Stereo -> stereo.
   SetStereoFrame(&src_frame_, 10, 10);
diff --git a/media/webrtc/trunk/webrtc/voice_engine/transmit_mixer.cc b/media/webrtc/trunk/webrtc/voice_engine/transmit_mixer.cc
index 5e67e200e0a..735611f339e 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/transmit_mixer.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/transmit_mixer.cc
@@ -1182,6 +1182,8 @@ bool TransmitMixer::IsRecordingMic()
 }
 
 // TODO(andrew): use RemixAndResample for this.
+// Note that if drift compensation is done here, a buffering stage will be
+// needed and this will need to switch to non-fixed resamplers.
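The test bound above is deliberately crude: take an assumed per-kernel delay, scale it by the worst-case rate ratio, and double it. A hedged restatement of that arithmetic as a standalone helper; the 16*3 constant mirrors the test's guess and is not a guarantee from the speex resampler:

#include <algorithm>
#include <cstdio>

// Crude worst-case resampler delay bound in samples, in the spirit of the
// unit-test change above. The constants are illustrative assumptions.
static int MaxResampleDelay(int src_rate_hz, int dst_rate_hz, int channels) {
  const int kKernelDelaySamples = 16 * 3;  // mirrors the test's 16*3 guess
  const double ratio =
      std::min(1.0, static_cast<double>(dst_rate_hz) / src_rate_hz);
  // Doubling gives headroom for the old (higher-delay) resampler paths.
  return static_cast<int>(ratio * kKernelDelaySamples * channels * 2);
}

int main() {
  printf("48k -> 16k stereo bound: %d samples\n",
         MaxResampleDelay(48000, 16000, 2));
  return 0;
}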
int TransmitMixer::GenerateAudioFrame(const int16_t audio[],
                                      int samples_per_channel,
                                      int num_channels,
diff --git a/media/webrtc/trunk/webrtc/voice_engine/voe_external_media_impl.cc b/media/webrtc/trunk/webrtc/voice_engine/voe_external_media_impl.cc
index c76c280b05f..e4feca4caa9 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/voe_external_media_impl.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/voe_external_media_impl.cc
@@ -193,7 +193,7 @@ int VoEExternalMediaImpl::ExternalRecordingInsertData(
         return -1;
     }
     if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
-        (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
+        (48000 != samplingFreqHz) && (44100 != samplingFreqHz))
     {
         shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
             "SetExternalRecordingStatus() invalid sample rate");
@@ -303,7 +303,7 @@ int VoEExternalMediaImpl::ExternalPlayoutGetData(
         return -1;
     }
     if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
-        (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
+        (48000 != samplingFreqHz) && (44100 != samplingFreqHz))
     {
         shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
             "ExternalPlayoutGetData() invalid sample rate");
diff --git a/media/webrtc/trunk/webrtc/voice_engine/voice_engine.gyp b/media/webrtc/trunk/webrtc/voice_engine/voice_engine.gyp
index e099ff9ac48..ef2b8894f38 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/voice_engine.gyp
+++ b/media/webrtc/trunk/webrtc/voice_engine/voice_engine.gyp
@@ -25,6 +25,9 @@
         '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
       ],
+      'defines': [
+        'WEBRTC_EXTERNAL_TRANSPORT',
+      ],
       'sources': [
         '../common_types.h',
         '../engine_configurations.h',
diff --git a/media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h b/media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h
index 42851d444b3..82b787b2edb 100644
--- a/media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h
@@ -300,9 +300,16 @@ inline int VoEChannelId(int moduleId)
   // Always excluded for Android builds
   #undef WEBRTC_CODEC_ISAC
-  #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  // We need WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT to make things work on Android.
+  // Motivation for the commented-out undef below is unclear.
+  //
+  // #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
 
-  #define ANDROID_NOT_SUPPORTED(stat) NOT_SUPPORTED(stat)
+  // This macro used to cause the calling function to set an error code and return.
+  // However, the unit tests seem to pass / behave reasonably without that,
+  // so it's disabled for now; see bug 819856.
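The 44000-to-44100 corrections above are exactly the kind of bug a single shared rate whitelist avoids; 44000 Hz is simply not a rate the engine ever produces. A trivial helper of that shape (IsSupportedExternalRate is an illustrative name, not engine API):

#include <cstdio>

// The rates the external-media paths accept after the fix; keeping them in
// one table avoids the 44000-vs-44100 typo the patch corrects.
static bool IsSupportedExternalRate(int sampling_freq_hz) {
  static const int kRates[] = { 16000, 32000, 44100, 48000 };
  for (int rate : kRates) {
    if (rate == sampling_freq_hz) return true;
  }
  return false;
}

int main() {
  printf("44000 supported? %d\n", IsSupportedExternalRate(44000));  // 0
  printf("44100 supported? %d\n", IsSupportedExternalRate(44100));  // 1
  return 0;
}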
+ #define ANDROID_NOT_SUPPORTED(stat) + //#define ANDROID_NOT_SUPPORTED(stat) NOT_SUPPORTED(stat) #else // LINUX PC @@ -321,9 +328,11 @@ inline int VoEChannelId(int moduleId) // *** WEBRTC_MAC *** // including iPhone -#ifdef WEBRTC_MAC +#if defined(WEBRTC_BSD) || defined(WEBRTC_MAC) +#if !defined(WEBRTC_BSD) #include +#endif #include #include #include @@ -339,7 +348,7 @@ inline int VoEChannelId(int moduleId) #include #include #include -#if !defined(WEBRTC_IOS) +#if !defined(WEBRTC_BSD) && !defined(WEBRTC_IOS) #include #include #include @@ -347,6 +356,7 @@ inline int VoEChannelId(int moduleId) #include #endif + #define DWORD unsigned long int #define WINAPI #define LPVOID void * @@ -401,6 +411,6 @@ inline int VoEChannelId(int moduleId) #else #define IPHONE_NOT_SUPPORTED(stat) -#endif // #ifdef WEBRTC_MAC +#endif // #if defined(WEBRTC_BSD) || defined(WEBRTC_MAC) #endif // WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H diff --git a/media/webrtc/trunk/webrtc/voice_engine/voice_engine_impl.cc b/media/webrtc/trunk/webrtc/voice_engine/voice_engine_impl.cc index 703b6a4470e..af70caf1f57 100644 --- a/media/webrtc/trunk/webrtc/voice_engine/voice_engine_impl.cc +++ b/media/webrtc/trunk/webrtc/voice_engine/voice_engine_impl.cc @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#if defined(WEBRTC_ANDROID) +#if !defined(WEBRTC_GONK) #if defined(WEBRTC_ANDROID_OPENSLES) #include "webrtc/modules/audio_device/android/audio_manager_jni.h" -#else +#elif defined(WEBRTC_ANDROID) #include "webrtc/modules/audio_device/android/audio_device_jni_android.h" #endif #endif @@ -147,16 +147,18 @@ bool VoiceEngine::Delete(VoiceEngine*& voiceEngine) int VoiceEngine::SetAndroidObjects(void* javaVM, void* env, void* context) { -#ifdef WEBRTC_ANDROID -#ifdef WEBRTC_ANDROID_OPENSLES - AudioManagerJni::SetAndroidAudioDeviceObjects(javaVM, env, context); - return 0; +#if !defined(WEBRTC_GONK) +#if defined(WEBRTC_ANDROID_OPENSLES) + AudioManagerJni::SetAndroidAudioDeviceObjects(javaVM, env, context); + return 0; +#elif defined(ANDROID) + return AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects( + javaVM, env, context); #else - return AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects( - javaVM, env, context); + return -1; #endif #else - return -1; + return -1; #endif } diff --git a/mobile/android/base/moz.build b/mobile/android/base/moz.build index 33e1f1ed226..16db328bd7c 100644 --- a/mobile/android/base/moz.build +++ b/mobile/android/base/moz.build @@ -61,14 +61,15 @@ stjar.sources += [ thirdparty_source_dir + f for f in sync_thirdparty_java_files stjar.javac_flags = '-Xlint:none' if CONFIG['MOZ_WEBRTC']: - video_root = TOPSRCDIR + '/media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/' - audio_root = TOPSRCDIR + '/media/webrtc/trunk/webrtc/modules/audio_device/android/org/webrtc/voiceengine/' + video_root = TOPSRCDIR + '/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/' + audio_root = TOPSRCDIR + '/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/' wrjar = add_java_jar('webrtc') wrjar.sources += [ video_root + 'CaptureCapabilityAndroid.java', video_root + 'VideoCaptureAndroid.java', video_root + 'VideoCaptureDeviceInfoAndroid.java', audio_root + 'WebRTCAudioDevice.java', + audio_root + 'AudioManagerAndroid.java', ] wrjar.extra_jars = [ 'gecko-browser.jar', diff --git a/widget/android/AndroidJNIWrapper.cpp b/widget/android/AndroidJNIWrapper.cpp index 32df22917c8..b0398cee10c 100644 --- 
a/widget/android/AndroidJNIWrapper.cpp +++ b/widget/android/AndroidJNIWrapper.cpp @@ -129,4 +129,9 @@ extern "C" { JavaVM* jsjni_GetVM() { return mozilla::AndroidBridge::GetVM(); } + + __attribute__ ((visibility("default"))) + JNIEnv* jsjni_GetJNIForThread() { + return GetJNIForThread(); + } } diff --git a/widget/android/AndroidJNIWrapper.h b/widget/android/AndroidJNIWrapper.h index 186a1ba8d3c..c9677128784 100644 --- a/widget/android/AndroidJNIWrapper.h +++ b/widget/android/AndroidJNIWrapper.h @@ -28,5 +28,6 @@ extern "C" void jsjni_CallStaticVoidMethodA(jclass cls, jmethodID method, jvalue extern "C" int jsjni_CallStaticIntMethodA(jclass cls, jmethodID method, jvalue *values); extern "C" jobject jsjni_GetGlobalContextRef(); extern "C" JavaVM* jsjni_GetVM(); +extern "C" JNIEnv* jsjni_GetJNIForThread(); #endif /* AndroidJNIWrapper_h__ */
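jsjni_GetJNIForThread is exported because a JNIEnv is only valid on the thread it was obtained for, so worker threads (such as WebRTC's capture threads) must attach themselves to the VM before making Java calls. A sketch of the usual GetEnv/AttachCurrentThread dance using only standard JNI calls; the real lookup lives in Gecko's GetJNIForThread, and detach handling is omitted here:

#include <jni.h>
#include <cstddef>

// Typical per-thread JNIEnv lookup: reuse the env if the thread is already
// attached, otherwise attach it. Uses the Android JNI signature, where
// AttachCurrentThread takes a JNIEnv**.
static JNIEnv* GetEnvForThisThread(JavaVM* vm) {
  JNIEnv* env = NULL;
  jint status = vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6);
  if (status == JNI_EDETACHED) {
    if (vm->AttachCurrentThread(&env, NULL) != JNI_OK)
      return NULL;  // attach failed; caller must handle a null env
  } else if (status != JNI_OK) {
    return NULL;
  }
  return env;
}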