Bug 983504 - Enumeration & MediaEngine changes for screen sharing. r=jesup,mt

Gian-Carlo Pascutto 2014-07-07 09:46:00 +02:00
parent f15606e0c4
commit d4515d519b
5 changed files with 123 additions and 40 deletions

MediaEngine.h

@ -9,6 +9,7 @@
#include "nsIDOMFile.h"
#include "DOMMediaStream.h"
#include "MediaStreamGraph.h"
#include "mozilla/dom/MediaStreamTrackBinding.h"
namespace mozilla {
@ -54,11 +55,13 @@ public:
/* Populate an array of video sources in the nsTArray. Also include devices
* that are currently unavailable. */
virtual void EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >*) = 0;
virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
nsTArray<nsRefPtr<MediaEngineVideoSource> >*) = 0;
/* Populate an array of audio sources in the nsTArray. Also include devices
* that are currently unavailable. */
virtual void EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >*) = 0;
virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
nsTArray<nsRefPtr<MediaEngineAudioSource> >*) = 0;
protected:
virtual ~MediaEngine() {}
@ -182,6 +185,9 @@ class MediaEngineVideoSource : public MediaEngineSource
public:
virtual ~MediaEngineVideoSource() {}
virtual const dom::MediaSourceEnum GetMediaSource() {
return dom::MediaSourceEnum::Camera;
}
/* This call reserves but does not start the device. */
virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
const MediaEnginePrefs &aPrefs) = 0;
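The two hunks above change the abstract MediaEngine interface: both Enumerate* entry points now take a dom::MediaSourceEnum selecting the kind of capture, and video sources gain a GetMediaSource() accessor that defaults to Camera. The sketch below shows how a caller might drive the new interface; it is illustrative only — the helper name and the direct use of the engine are assumptions, not code from this patch (it assumes MediaEngine.h is included and we are inside namespace mozilla).

// Sketch only: enumerate screen-capture sources through the new
// source-aware interface. "PickFirstScreenSource" is a hypothetical helper.
static already_AddRefed<MediaEngineVideoSource>
PickFirstScreenSource(MediaEngine* aEngine)
{
  nsTArray<nsRefPtr<MediaEngineVideoSource> > sources;
  // The new first argument selects what to enumerate: Camera, Screen, Application, ...
  aEngine->EnumerateVideoDevices(dom::MediaSourceEnum::Screen, &sources);

  for (uint32_t i = 0; i < sources.Length(); ++i) {
    // Each source now reports what it captures (Camera unless overridden).
    if (sources[i]->GetMediaSource() == dom::MediaSourceEnum::Screen) {
      nsRefPtr<MediaEngineVideoSource> source = sources[i];
      return source.forget();
    }
  }
  return nullptr;
}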

MediaEngineDefault.cpp

@ -478,9 +478,15 @@ MediaEngineDefaultAudioSource::Notify(nsITimer* aTimer)
}
void
MediaEngineDefault::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources) {
MediaEngineDefault::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources) {
MutexAutoLock lock(mMutex);
// only supports camera sources (for now). See Bug 1038241
if (aMediaSource != dom::MediaSourceEnum::Camera) {
return;
}
// We once had code here to find a VideoSource with the same settings and re-use that.
// This no longer is possible since the resolution is being set in Allocate().
@ -492,10 +498,13 @@ MediaEngineDefault::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSour
}
void
MediaEngineDefault::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources) {
MediaEngineDefault::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources) {
MutexAutoLock lock(mMutex);
int32_t len = mASources.Length();
// aMediaSource is ignored for audio devices (for now).
for (int32_t i = 0; i < len; i++) {
nsRefPtr<MediaEngineAudioSource> source = mASources.ElementAt(i);
if (source->IsAvailable()) {
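With the guard above, the fake backend answers only camera requests; any other source type yields an empty list, and the audio path accepts but ignores the selector for now. A minimal illustration of that behaviour, assuming a MediaEngineDefault instance can be constructed directly (in the tree it is normally created by MediaManager when fake streams are requested):

// Illustrative only — not part of the patch.
nsRefPtr<MediaEngineDefault> engine = new MediaEngineDefault();

nsTArray<nsRefPtr<MediaEngineVideoSource> > screenSources;
engine->EnumerateVideoDevices(dom::MediaSourceEnum::Screen, &screenSources);
// screenSources stays empty: the early return above rejects non-camera requests.

nsTArray<nsRefPtr<MediaEngineVideoSource> > cameraSources;
engine->EnumerateVideoDevices(dom::MediaSourceEnum::Camera, &cameraSources);
// cameraSources is populated with the fake video source(s).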

MediaEngineDefault.h

@ -138,8 +138,10 @@ public:
: mMutex("mozilla::MediaEngineDefault")
{}
virtual void EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
virtual void EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
private:
~MediaEngineDefault() {}

MediaEngineWebRTC.cpp

@ -45,12 +45,15 @@ GetUserMediaLog()
namespace mozilla {
MediaEngineWebRTC::MediaEngineWebRTC(MediaEnginePrefs &aPrefs)
: mMutex("mozilla::MediaEngineWebRTC")
, mVideoEngine(nullptr)
, mVoiceEngine(nullptr)
, mVideoEngineInit(false)
, mAudioEngineInit(false)
, mHasTabVideoSource(false)
: mMutex("mozilla::MediaEngineWebRTC")
, mScreenEngine(nullptr)
, mAppEngine(nullptr)
, mVideoEngine(nullptr)
, mVoiceEngine(nullptr)
, mVideoEngineInit(false)
, mAudioEngineInit(false)
, mScreenEngineInit(false)
, mAppEngineInit(false)
{
#ifndef MOZ_B2G_CAMERA
nsCOMPtr<nsIComponentRegistrar> compMgr;
@ -69,11 +72,18 @@ MediaEngineWebRTC::MediaEngineWebRTC(MediaEnginePrefs &aPrefs)
}
void
MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources)
MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources)
{
#ifdef MOZ_B2G_CAMERA
// We spawn threads to handle gUM runnables, so we must protect the member vars
MutexAutoLock lock(mMutex);
#ifdef MOZ_B2G_CAMERA
if (aMediaSource != dom::MediaSourceEnum::Camera) {
// only supports camera sources
return;
}
/**
* We still enumerate every time, in case a new device was plugged in since
* the last call. TODO: Verify that WebRTC actually does deal with hotplugging
@ -102,7 +112,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSourc
// We've already seen this device, just append.
aVSources->AppendElement(vSource.get());
} else {
vSource = new MediaEngineWebRTCVideoSource(i);
vSource = new MediaEngineWebRTCVideoSource(i, mediaSourceType);
mVideoSources.Put(uuid, vSource); // Hashtable takes ownership.
aVSources->AppendElement(vSource);
}
@ -112,9 +122,9 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSourc
#else
ScopedCustomReleasePtr<webrtc::ViEBase> ptrViEBase;
ScopedCustomReleasePtr<webrtc::ViECapture> ptrViECapture;
// We spawn threads to handle gUM runnables, so we must protect the member vars
MutexAutoLock lock(mMutex);
webrtc::Config configSet;
webrtc::VideoEngine *videoEngine = nullptr;
bool *videoEngineInit = nullptr;
#ifdef MOZ_WIDGET_ANDROID
// get the JVM
@ -125,25 +135,53 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSourc
return;
}
#endif
if (!mVideoEngine) {
if (!(mVideoEngine = webrtc::VideoEngine::Create())) {
return;
}
switch (aMediaSource) {
case dom::MediaSourceEnum::Application:
mAppEngineConfig.Set<webrtc::CaptureDeviceInfo>(
new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Application));
if (!mAppEngine) {
if (!(mAppEngine = webrtc::VideoEngine::Create(mAppEngineConfig))) {
return;
}
}
videoEngine = mAppEngine;
videoEngineInit = &mAppEngineInit;
break;
case dom::MediaSourceEnum::Screen:
mScreenEngineConfig.Set<webrtc::CaptureDeviceInfo>(
new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Screen));
if (!mScreenEngine) {
if (!(mScreenEngine = webrtc::VideoEngine::Create(mScreenEngineConfig))) {
return;
}
}
videoEngine = mScreenEngine;
videoEngineInit = &mScreenEngineInit;
break;
case dom::MediaSourceEnum::Camera:
// fall through
default:
if (!mVideoEngine) {
if (!(mVideoEngine = webrtc::VideoEngine::Create())) {
return;
}
}
videoEngine = mVideoEngine;
videoEngineInit = &mVideoEngineInit;
break;
}
ptrViEBase = webrtc::ViEBase::GetInterface(mVideoEngine);
ptrViEBase = webrtc::ViEBase::GetInterface(videoEngine);
if (!ptrViEBase) {
return;
}
if (!mVideoEngineInit) {
if (ptrViEBase->Init() < 0) {
return;
}
mVideoEngineInit = true;
if (ptrViEBase->Init() < 0) {
return;
}
*videoEngineInit = true;
ptrViECapture = webrtc::ViECapture::GetInterface(mVideoEngine);
ptrViECapture = webrtc::ViECapture::GetInterface(videoEngine);
if (!ptrViECapture) {
return;
}
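The switch above is the core of the patch: each capture type gets its own lazily created webrtc::VideoEngine, because the CaptureDeviceInfo that tells the engine whether to open cameras, screens, or application windows is supplied through the webrtc::Config at engine-creation time. Condensed into a standalone sketch (the engine/config pair mirrors the members added to MediaEngineWebRTC.h further down; the sketch only sets the Config when it actually creates the engine, a slight simplification of the hunk above):

// Sketch of the lazy, per-source engine pattern used above.
static webrtc::VideoEngine*
GetOrCreateScreenEngine(webrtc::Config& aConfig, webrtc::VideoEngine*& aEngine)
{
  if (!aEngine) {
    // The capture backend is selected through the Config: Screen here,
    // Application for app/window sharing, a default Config for cameras.
    aConfig.Set<webrtc::CaptureDeviceInfo>(
      new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Screen));
    aEngine = webrtc::VideoEngine::Create(aConfig);
  }
  return aEngine; // may still be null if Create() failed
}

Because the Config is consumed when the engine is created, the patch treats the capture backend as fixed for the lifetime of each engine; keeping camera, screen, and application engines separate also keeps camera enumeration independent of whether desktop capture is available on a given platform.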
@ -207,7 +245,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSourc
// We've already seen this device, just append.
aVSources->AppendElement(vSource.get());
} else {
vSource = new MediaEngineWebRTCVideoSource(mVideoEngine, i);
vSource = new MediaEngineWebRTCVideoSource(videoEngine, i, aMediaSource);
mVideoSources.Put(uuid, vSource); // Hashtable takes ownership.
aVSources->AppendElement(vSource);
}
@ -221,7 +259,8 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSourc
}
void
MediaEngineWebRTC::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources)
MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources)
{
ScopedCustomReleasePtr<webrtc::VoEBase> ptrVoEBase;
ScopedCustomReleasePtr<webrtc::VoEHardware> ptrVoEHw;
@ -323,6 +362,13 @@ MediaEngineWebRTC::Shutdown()
webrtc::VideoEngine::Delete(mVideoEngine);
}
if (mScreenEngine) {
webrtc::VideoEngine::Delete(mScreenEngine);
}
if (mAppEngine) {
webrtc::VideoEngine::Delete(mAppEngine);
}
if (mVoiceEngine) {
mAudioSources.Clear();
mVoiceEngine->SetTraceCallback(nullptr);
@ -331,6 +377,8 @@ MediaEngineWebRTC::Shutdown()
mVideoEngine = nullptr;
mVoiceEngine = nullptr;
mScreenEngine = nullptr;
mAppEngine = nullptr;
if (mThread) {
mThread->Shutdown();

MediaEngineWebRTC.h

@ -28,9 +28,9 @@
#include "MediaStreamGraph.h"
#include "MediaEngineWrapper.h"
#include "mozilla/dom/MediaStreamTrackBinding.h"
// WebRTC library includes follow
#include "webrtc/common.h"
// Audio Engine
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
@ -95,12 +95,14 @@ class MediaEngineWebRTCVideoSource : public MediaEngineVideoSource
{
public:
#ifdef MOZ_B2G_CAMERA
MediaEngineWebRTCVideoSource(int aIndex)
MediaEngineWebRTCVideoSource(int aIndex,
dom::MediaSourceEnum aMediaSource = dom::MediaSourceEnum::Camera)
: mCameraControl(nullptr)
, mCallbackMonitor("WebRTCCamera.CallbackMonitor")
, mRotation(0)
, mBackCamera(false)
, mCaptureIndex(aIndex)
, mMediaSource(aMediaSource)
, mMonitor("WebRTCCamera.Monitor")
, mWidth(0)
, mHeight(0)
@ -124,11 +126,13 @@ public:
*/
virtual bool IsTextureSupported() { return false; }
MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex)
MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex,
dom::MediaSourceEnum aMediaSource = dom::MediaSourceEnum::Camera)
: mVideoEngine(aVideoEnginePtr)
, mCaptureIndex(aIndex)
, mFps(-1)
, mMinFps(-1)
, mMediaSource(aMediaSource)
, mMonitor("WebRTCCamera.Monitor")
, mWidth(0)
, mHeight(0)
@ -165,6 +169,10 @@ public:
return false;
}
virtual const dom::MediaSourceEnum GetMediaSource() {
return mMediaSource;
}
#ifndef MOZ_B2G_CAMERA
NS_DECL_THREADSAFE_ISUPPORTS
#else
@ -239,6 +247,7 @@ private:
int mCaptureIndex;
int mFps; // Track rate (30 fps by default)
int mMinFps; // Min rate we want to accept
dom::MediaSourceEnum mMediaSource; // source of media (camera | application | screen)
// mMonitor protects mImage access/changes, and transitions of mState
// from kStarted to kStopped (which are combined with EndTrack() and
@ -381,9 +390,10 @@ public:
// before invoking Shutdown on this class.
void Shutdown();
virtual void EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
virtual void EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
private:
~MediaEngineWebRTC() {
Shutdown();
@ -397,14 +407,22 @@ private:
nsCOMPtr<nsIThread> mThread;
Mutex mMutex;
// protected with mMutex:
// protected with mMutex:
webrtc::VideoEngine* mScreenEngine;
webrtc::VideoEngine* mAppEngine;
webrtc::VideoEngine* mVideoEngine;
webrtc::VoiceEngine* mVoiceEngine;
// specialized configurations
webrtc::Config mAppEngineConfig;
webrtc::Config mScreenEngineConfig;
// Need this to avoid unneccesary WebRTC calls while enumerating.
bool mVideoEngineInit;
bool mAudioEngineInit;
bool mScreenEngineInit;
bool mAppEngineInit;
bool mHasTabVideoSource;
// Store devices we've already seen in a hashtable for quick return.
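Taken together, the header changes keep existing camera call sites compiling because the new mediaSource constructor parameter defaults to dom::MediaSourceEnum::Camera, while the enumeration code can tag screen and application sources explicitly and later report them through GetMediaSource(). A small illustration (the function, engine pointers, and index are placeholders, not code from this patch):

// Illustrative only: constructor defaults preserve the old camera behaviour,
// while enumeration can tag screen/application capture explicitly.
static void
IllustrateSourceTagging(webrtc::VideoEngine* aCameraEngine,
                        webrtc::VideoEngine* aScreenEngine, int aIndex)
{
  nsRefPtr<MediaEngineWebRTCVideoSource> cam =
    new MediaEngineWebRTCVideoSource(aCameraEngine, aIndex);        // defaults to Camera
  nsRefPtr<MediaEngineWebRTCVideoSource> scr =
    new MediaEngineWebRTCVideoSource(aScreenEngine, aIndex,
                                     dom::MediaSourceEnum::Screen); // tagged as Screen
  MOZ_ASSERT(cam->GetMediaSource() == dom::MediaSourceEnum::Camera);
  MOZ_ASSERT(scr->GetMediaSource() == dom::MediaSourceEnum::Screen);
}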