Merge m-c to inbound.

Author: Ryan VanderMeulen, 2013-05-02 15:17:39 -04:00
commit dee43def22
31 changed files with 716 additions and 135 deletions

View File

@ -518,14 +518,25 @@ protected:
class WakeLockBoolWrapper {
public:
WakeLockBoolWrapper(bool val = false)
: mValue(val), mCanPlay(true), mOuter(nullptr) {}
void SetOuter(HTMLMediaElement* outer) { mOuter = outer; }
void SetCanPlay(bool aCanPlay);
operator bool() const { return mValue; }
WakeLockBoolWrapper& operator=(bool val);
bool operator !() const { return !mValue; }
private:
void UpdateWakeLock();
bool mValue;
bool mCanPlay;
HTMLMediaElement* mOuter;
nsCOMPtr<nsIDOMMozWakeLock> mWakeLock;
};

View File

@ -2088,21 +2088,47 @@ NS_IMETHODIMP HTMLMediaElement::Play()
return rv.ErrorCode();
}
HTMLMediaElement::WakeLockBoolWrapper&
HTMLMediaElement::WakeLockBoolWrapper::operator=(bool val) {
if (mValue == val) {
return *this;
}
mValue = val;
UpdateWakeLock();
return *this;
}
void
HTMLMediaElement::WakeLockBoolWrapper::SetCanPlay(bool aCanPlay)
{
mCanPlay = aCanPlay;
UpdateWakeLock();
}
void
HTMLMediaElement::WakeLockBoolWrapper::UpdateWakeLock()
{
if (!mOuter) {
return;
}
bool playing = (!mValue && mCanPlay);
if (playing) {
nsCOMPtr<nsIPowerManagerService> pmService =
do_GetService(POWERMANAGERSERVICE_CONTRACTID);
NS_ENSURE_TRUE_VOID(pmService);
if (!mWakeLock) {
pmService->NewWakeLock(NS_LITERAL_STRING("cpu"),
mOuter->OwnerDoc()->GetWindow(),
getter_AddRefs(mWakeLock));
}
} else if (mWakeLock) {
// Wakelock 'unlocks' itself in its destructor.
mWakeLock = nullptr;
}
}
bool HTMLMediaElement::ParseAttribute(int32_t aNamespaceID,
@ -3715,6 +3741,7 @@ void HTMLMediaElement::UpdateAudioChannelPlayingState()
if (mPlayingThroughTheAudioChannel) {
bool canPlay;
mAudioChannelAgent->StartPlaying(&canPlay);
mPaused.SetCanPlay(canPlay);
} else {
mAudioChannelAgent->StopPlaying();
mAudioChannelAgent = nullptr;
@ -3728,6 +3755,7 @@ NS_IMETHODIMP HTMLMediaElement::CanPlayChanged(bool canPlay)
NS_ENSURE_TRUE(nsContentUtils::IsCallerChrome(), NS_ERROR_NOT_AVAILABLE);
UpdateChannelMuteState(canPlay);
mPaused.SetCanPlay(canPlay);
return NS_OK;
}
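
The wrapper above ties the element's paused flag to a wake lock: the lock is held only while the media is unpaused and the audio channel still allows playback. A minimal standalone sketch of that decision logic, using a hypothetical FakeWakeLock in place of nsIDOMMozWakeLock (plain C++, not the Gecko API):

#include <iostream>
#include <memory>

// Hypothetical stand-in for a platform wake lock that releases itself on destruction.
struct FakeWakeLock {
  FakeWakeLock()  { std::cout << "wake lock acquired\n"; }
  ~FakeWakeLock() { std::cout << "wake lock released\n"; }
};

// Simplified mirror of WakeLockBoolWrapper: the wrapped bool is "paused",
// and the lock should be held only while we are unpaused AND allowed to play.
class PausedFlag {
public:
  PausedFlag& operator=(bool paused) {
    if (mPaused != paused) { mPaused = paused; Update(); }
    return *this;
  }
  void SetCanPlay(bool canPlay) { mCanPlay = canPlay; Update(); }
  explicit operator bool() const { return mPaused; }

private:
  void Update() {
    bool playing = !mPaused && mCanPlay;
    if (playing && !mLock) {
      mLock.reset(new FakeWakeLock());   // acquire once when playback starts
    } else if (!playing && mLock) {
      mLock.reset();                     // drop the lock; its destructor releases it
    }
  }
  bool mPaused = true;
  bool mCanPlay = true;
  std::unique_ptr<FakeWakeLock> mLock;
};

int main() {
  PausedFlag paused;
  paused = false;           // play: lock acquired
  paused.SetCanPlay(false); // muted by the audio channel: lock released
  paused.SetCanPlay(true);  // allowed again: lock re-acquired
  paused = true;            // pause: lock released
}

Every transition of either flag funnels through a single Update() step, which is the same shape as UpdateWakeLock() above.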

View File

@ -31,11 +31,14 @@ endif
FORCE_STATIC_LIB = 1
include $(topsrcdir)/config/rules.mk
include $(topsrcdir)/ipc/chromium/chromium-config.mk
ifdef MOZ_WEBRTC
LOCAL_INCLUDES += \
-I$(topsrcdir)/media/webrtc/trunk/webrtc \
-I$(topsrcdir)/media/webrtc/signaling/src/common \
-I$(topsrcdir)/media/webrtc/signaling/src/common/browser_logging \
-I$(topsrcdir)/dom/base \
-I$(topsrcdir)/dom/camera \
$(NULL)
endif

View File

@ -27,20 +27,62 @@ GetUserMediaLog()
}
#endif
#undef LOG
#define LOG(args) PR_LOG(GetUserMediaLog(), PR_LOG_DEBUG, args)
#include "MediaEngineWebRTC.h"
#include "ImageContainer.h"
#ifdef MOZ_WIDGET_ANDROID
#include "AndroidBridge.h"
#endif
#undef LOG
#define LOG(args) PR_LOG(GetUserMediaLog(), PR_LOG_DEBUG, args)
namespace mozilla {
void
MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources)
{
#ifdef MOZ_WIDGET_GONK
MutexAutoLock lock(mMutex);
if (!mCameraManager) {
return;
}
/**
* We still enumerate every time, in case a new device was plugged in since
* the last call. TODO: Verify that WebRTC actually does deal with hotplugging
* new devices (with or without new engine creation) and adjust accordingly.
* Enumeration is not necessary if GIPS reports the same set of devices
* for a given instance of the engine. Likewise, if a device is unplugged,
* mVideoSources must be updated.
*/
int num = 0;
nsresult result;
result = mCameraManager->GetNumberOfCameras(num);
if (num <= 0 || result != NS_OK) {
return;
}
for (int i = 0; i < num; i++) {
nsCString cameraName;
result = mCameraManager->GetCameraName(i, cameraName);
if (result != NS_OK) {
continue;
}
nsRefPtr<MediaEngineWebRTCVideoSource> vSource;
NS_ConvertUTF8toUTF16 uuid(cameraName);
if (mVideoSources.Get(uuid, getter_AddRefs(vSource))) {
// We've already seen this device, just append.
aVSources->AppendElement(vSource.get());
} else {
vSource = new MediaEngineWebRTCVideoSource(mCameraManager, i, mWindowId);
mVideoSources.Put(uuid, vSource); // Hashtable takes ownership.
aVSources->AppendElement(vSource);
}
}
return;
#else
webrtc::ViEBase* ptrViEBase;
webrtc::ViECapture* ptrViECapture;
// We spawn threads to handle gUM runnables, so we must protect the member vars
@ -168,6 +210,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSourc
ptrViECapture->Release();
return;
#endif
}
void
@ -233,7 +276,6 @@ MediaEngineWebRTC::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSourc
continue;
}
LOG((" Capture Device Index %d, Name %s Uuid %s", i, deviceName, uniqueId));
if (uniqueId[0] == '\0') {
// Mac and Linux don't set uniqueId!
MOZ_ASSERT(sizeof(deviceName) == sizeof(uniqueId)); // total paranoia
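
For reference, the Gonk branch of EnumerateVideoDevices above re-enumerates on every call but keys previously created sources by camera name, so repeat callers get the same source object back. A standalone sketch of that enumerate-and-cache pattern, with a hypothetical VideoSource type standing in for MediaEngineWebRTCVideoSource and plain C++ containers instead of nsRefPtrHashtable:

#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

// Hypothetical simplified source type.
struct VideoSource {
  explicit VideoSource(int index) : index(index) {}
  int index;
};

// Re-enumerate every call (devices may have been hot-plugged), but reuse the
// cached source object when a device with the same unique id was seen before.
std::vector<std::shared_ptr<VideoSource>>
EnumerateVideoDevices(const std::vector<std::string>& deviceIds,
                      std::unordered_map<std::string, std::shared_ptr<VideoSource>>& cache) {
  std::vector<std::shared_ptr<VideoSource>> result;
  for (size_t i = 0; i < deviceIds.size(); ++i) {
    auto it = cache.find(deviceIds[i]);
    if (it != cache.end()) {
      result.push_back(it->second);          // already seen: hand back the cached source
    } else {
      auto source = std::make_shared<VideoSource>(static_cast<int>(i));
      cache.emplace(deviceIds[i], source);   // cache keeps a (shared) reference
      result.push_back(source);
    }
  }
  return result;
}

int main() {
  std::unordered_map<std::string, std::shared_ptr<VideoSource>> cache;
  auto first  = EnumerateVideoDevices({"back", "front"}, cache);
  auto second = EnumerateVideoDevices({"back", "front"}, cache);
  return first[0] == second[0] ? 0 : 1;  // the same device id yields the same cached source
}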

View File

@ -45,19 +45,76 @@
#include "video_engine/include/vie_render.h"
#include "video_engine/include/vie_capture.h"
#include "video_engine/include/vie_file.h"
#ifdef MOZ_WIDGET_GONK
#include "CameraPreviewMediaStream.h"
#include "DOMCameraManager.h"
#include "GonkCameraControl.h"
#include "ImageContainer.h"
#include "nsGlobalWindow.h"
#include "prprf.h"
#endif
#include "NullTransport.h"
namespace mozilla {
#ifdef MOZ_WIDGET_GONK
class CameraAllocateRunnable;
class GetCameraNameRunnable;
#endif
/**
* The WebRTC implementation of the MediaEngine interface.
*
* On the B2G platform, member data may be accessed from different threads
* after construction:
*
* MediaThread:
* mState, mImage, mWidth, mHeight, mCapability, mPrefs, mDeviceName, mUniqueId,
* mInitDone, mSources, mImageContainer, mLastCapture
*
* MainThread:
* mDOMCameraControl, mCaptureIndex, mCameraThread, mWindowId, mCameraManager,
* mNativeCameraControl, mPreviewStream, mState, mLastCapture, mWidth, mHeight
*
* mWidth, mHeight and mImage are protected by mMonitor;
* mState and mLastCapture are protected by mCallbackMonitor;
* every other member is accessed from a single thread only.
*/
class MediaEngineWebRTCVideoSource : public MediaEngineVideoSource
, public nsRunnable
#ifdef MOZ_WIDGET_GONK
, public nsICameraGetCameraCallback
, public nsICameraPreviewStreamCallback
, public nsICameraTakePictureCallback
, public nsICameraReleaseCallback
, public nsICameraErrorCallback
, public CameraPreviewFrameCallback
#else
, public webrtc::ExternalRenderer
#endif
{
public:
#ifdef MOZ_WIDGET_GONK
MediaEngineWebRTCVideoSource(nsDOMCameraManager* aCameraManager,
int aIndex, uint64_t aWindowId)
: mCameraManager(aCameraManager)
, mNativeCameraControl(nullptr)
, mPreviewStream(nullptr)
, mWindowId(aWindowId)
, mCallbackMonitor("WebRTCCamera.CallbackMonitor")
, mCaptureIndex(aIndex)
, mMonitor("WebRTCCamera.Monitor")
, mWidth(0)
, mHeight(0)
, mInitDone(false)
, mInSnapshotMode(false)
, mSnapshotPath(nullptr)
{
mState = kReleased;
NS_NewNamedThread("CameraThread", getter_AddRefs(mCameraThread), nullptr);
Init();
}
#else
// ViEExternalRenderer.
virtual int FrameSizeChange(unsigned int, unsigned int, unsigned int);
virtual int DeliverFrame(unsigned char*, int, uint32_t, int64_t);
@ -65,11 +122,11 @@ public:
MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex)
: mVideoEngine(aVideoEnginePtr)
, mCaptureIndex(aIndex)
, mFps(-1)
, mMinFps(-1)
, mMonitor("WebRTCCamera.Monitor")
, mWidth(0)
, mHeight(0)
, mInitDone(false)
, mInSnapshotMode(false)
, mSnapshotPath(NULL) {
@ -77,6 +134,8 @@ public:
mState = kReleased;
Init();
}
#endif
~MediaEngineWebRTCVideoSource() { Shutdown(); }
virtual void GetName(nsAString&);
@ -96,6 +155,22 @@ public:
TrackTicks &aLastEndTime);
NS_DECL_ISUPPORTS
#ifdef MOZ_WIDGET_GONK
NS_DECL_NSICAMERAGETCAMERACALLBACK
NS_DECL_NSICAMERAPREVIEWSTREAMCALLBACK
NS_DECL_NSICAMERATAKEPICTURECALLBACK
NS_DECL_NSICAMERARELEASECALLBACK
NS_DECL_NSICAMERAERRORCALLBACK
void AllocImpl();
void DeallocImpl();
void StartImpl(webrtc::CaptureCapability aCapability);
void StopImpl();
void SnapshotImpl();
virtual void OnNewFrame(const gfxIntSize& aIntrinsicSize, layers::Image* aImage);
#endif
// This runnable is for creating a temporary file on the main thread.
NS_IMETHODIMP
@ -125,15 +200,31 @@ private:
void Shutdown();
// Engine variables.
#ifdef MOZ_WIDGET_GONK
// The MediaEngine holds this DOM object, and the MediaEngine itself is held
// by Navigator, so both always outlive this object. A raw pointer is used
// because the DOM object must not be addrefed/released on any thread other
// than the main thread, yet we need to reach it from here.
nsDOMCameraManager* mCameraManager;
nsRefPtr<nsDOMCameraControl> mDOMCameraControl;
nsRefPtr<nsGonkCameraControl> mNativeCameraControl;
nsRefPtr<DOMCameraPreview> mPreviewStream;
uint64_t mWindowId;
mozilla::ReentrantMonitor mCallbackMonitor; // Monitor for camera callback handling
nsRefPtr<nsIThread> mCameraThread;
nsRefPtr<nsIDOMFile> mLastCapture;
#else
webrtc::VideoEngine* mVideoEngine; // Weak reference, don't free.
webrtc::ViEBase* mViEBase;
webrtc::ViECapture* mViECapture;
webrtc::ViERender* mViERender;
#endif
webrtc::CaptureCapability mCapability; // Doesn't work on OS X.
int mCaptureIndex;
int mFps; // Track rate (30 fps by default)
int mMinFps; // Min rate we want to accept
@ -142,16 +233,16 @@ private:
// image changes). Note that mSources is not accessed from other threads
// for video and is not protected.
Monitor mMonitor; // Monitor for processing WebRTC frames.
int mWidth, mHeight;
nsRefPtr<layers::Image> mImage;
nsRefPtr<layers::ImageContainer> mImageContainer;
nsTArray<SourceMediaStream *> mSources; // When this goes empty, we shut down HW
bool mInitDone;
bool mInSnapshotMode;
nsString* mSnapshotPath;
// These are in UTF-8 but webrtc api uses char arrays
char mDeviceName[KMaxDeviceNameLength];
char mUniqueId[KMaxUniqueIdLength];
@ -246,16 +337,31 @@ private:
class MediaEngineWebRTC : public MediaEngine
{
public:
#ifdef MOZ_WIDGET_GONK
MediaEngineWebRTC(nsDOMCameraManager* aCameraManager, uint64_t aWindowId)
: mMutex("mozilla::MediaEngineWebRTC")
, mVideoEngine(nullptr)
, mVoiceEngine(nullptr)
, mVideoEngineInit(false)
, mAudioEngineInit(false)
, mCameraManager(aCameraManager)
, mWindowId(aWindowId)
{
mVideoSources.Init();
mAudioSources.Init();
}
#else
MediaEngineWebRTC()
: mMutex("mozilla::MediaEngineWebRTC")
, mVideoEngine(NULL)
, mVoiceEngine(NULL)
, mVideoEngineInit(false)
, mAudioEngineInit(false)
: mMutex("mozilla::MediaEngineWebRTC")
, mVideoEngine(nullptr)
, mVoiceEngine(nullptr)
, mVideoEngineInit(false)
, mAudioEngineInit(false)
{
mVideoSources.Init();
mAudioSources.Init();
}
#endif
~MediaEngineWebRTC() { Shutdown(); }
// Clients should make sure video/audio sources are cleaned up
@ -280,6 +386,18 @@ private:
// Maps UUID to MediaEngineSource (one set for audio, one for video).
nsRefPtrHashtable<nsStringHashKey, MediaEngineWebRTCVideoSource > mVideoSources;
nsRefPtrHashtable<nsStringHashKey, MediaEngineWebRTCAudioSource > mAudioSources;
#ifdef MOZ_WIDGET_GONK
// The MediaEngine holds this DOM object, and the MediaEngine itself is held
// by Navigator, so both always outlive this object. A raw pointer is used
// because the DOM object must not be addrefed/released on any thread other
// than the main thread, yet we need to reach it from here.
nsDOMCameraManager* mCameraManager;
uint64_t mWindowId;
#endif
};
}

View File

@ -25,6 +25,7 @@ extern PRLogModuleInfo* GetMediaManagerLog();
NS_IMPL_THREADSAFE_ISUPPORTS1(MediaEngineWebRTCVideoSource, nsIRunnable)
// ViEExternalRenderer Callback.
#ifndef MOZ_WIDGET_GONK
int
MediaEngineWebRTCVideoSource::FrameSizeChange(
unsigned int w, unsigned int h, unsigned int streams)
@ -102,6 +103,7 @@ MediaEngineWebRTCVideoSource::DeliverFrame(
return 0;
}
#endif
// Called if the graph thinks it's running out of buffered video; repeat
// the last frame for whatever minimum period it think it needs. Note that
@ -154,6 +156,10 @@ MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
void
MediaEngineWebRTCVideoSource::ChooseCapability(const MediaEnginePrefs &aPrefs)
{
#ifdef MOZ_WIDGET_GONK
mCapability.width = aPrefs.mWidth;
mCapability.height = aPrefs.mHeight;
#else
int num = mViECapture->NumberOfCapabilities(mUniqueId, KMaxUniqueIdLength);
LOG(("ChooseCapability: prefs: %dx%d @%d-%dfps", aPrefs.mWidth, aPrefs.mHeight, aPrefs.mFPS, aPrefs.mMinFPS));
@ -197,6 +203,7 @@ MediaEngineWebRTCVideoSource::ChooseCapability(const MediaEnginePrefs &aPrefs)
}
}
LOG(("chose cap %dx%d @%dfps", mCapability.width, mCapability.height, mCapability.maxFPS));
#endif
}
void
@ -217,6 +224,18 @@ nsresult
MediaEngineWebRTCVideoSource::Allocate(const MediaEnginePrefs &aPrefs)
{
LOG((__FUNCTION__));
#ifdef MOZ_WIDGET_GONK
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
if (mState == kReleased && mInitDone) {
ChooseCapability(aPrefs);
NS_DispatchToMainThread(WrapRunnable(this,
&MediaEngineWebRTCVideoSource::AllocImpl));
mCallbackMonitor.Wait();
if (mState != kAllocated) {
return NS_ERROR_FAILURE;
}
}
#else
if (mState == kReleased && mInitDone) {
// Note: if shared, we don't allow a later opener to affect the resolution.
// (This may change depending on spec changes for Constraints/settings)
@ -233,6 +252,7 @@ MediaEngineWebRTCVideoSource::Allocate(const MediaEnginePrefs &aPrefs)
} else {
LOG(("Video device %d allocated shared", mCaptureIndex));
}
#endif
return NS_OK;
}
@ -242,11 +262,22 @@ MediaEngineWebRTCVideoSource::Deallocate()
{
LOG((__FUNCTION__));
if (mSources.IsEmpty()) {
#ifdef MOZ_WIDGET_GONK
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
#endif
if (mState != kStopped && mState != kAllocated) {
return NS_ERROR_FAILURE;
}
#ifdef MOZ_WIDGET_GONK
// We do not register success callback here
NS_DispatchToMainThread(WrapRunnable(this,
&MediaEngineWebRTCVideoSource::DeallocImpl));
mCallbackMonitor.Wait();
if (mState != kReleased) {
return NS_ERROR_FAILURE;
}
#elif XP_MACOSX
// Bug 829907 - on mac, in shutdown, the mainthread stops processing
// 'native' events, and the QTKit code uses events to the main native CFRunLoop
// in order to provide thread safety. In order to avoid this locking us up,
@ -288,13 +319,25 @@ MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
aStream->AddTrack(aID, USECS_PER_S, 0, new VideoSegment());
aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);
#ifdef MOZ_WIDGET_GONK
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
#endif
if (mState == kStarted) {
return NS_OK;
}
mImageContainer = layers::LayerManager::CreateImageContainer();
#ifdef MOZ_WIDGET_GONK
NS_DispatchToMainThread(WrapRunnable(this,
&MediaEngineWebRTCVideoSource::StartImpl,
mCapability));
mCallbackMonitor.Wait();
if (mState != kStarted) {
return NS_ERROR_FAILURE;
}
#else
mState = kStarted;
error = mViERender->AddRenderer(mCaptureIndex, webrtc::kVideoI420, (webrtc::ExternalRenderer*)this);
if (error == -1) {
return NS_ERROR_FAILURE;
@ -308,6 +351,7 @@ MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
if (mViECapture->StartCapture(mCaptureIndex, mCapability) < 0) {
return NS_ERROR_FAILURE;
}
#endif
return NS_OK;
}
@ -323,7 +367,9 @@ MediaEngineWebRTCVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
if (!mSources.IsEmpty()) {
return NS_OK;
}
#ifdef MOZ_WIDGET_GONK
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
#endif
if (mState != kStarted) {
return NS_ERROR_FAILURE;
}
@ -336,10 +382,14 @@ MediaEngineWebRTCVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
// usage
mImage = nullptr;
}
#ifdef MOZ_WIDGET_GONK
NS_DispatchToMainThread(WrapRunnable(this,
&MediaEngineWebRTCVideoSource::StopImpl));
#else
mViERender->StopRender(mCaptureIndex);
mViERender->RemoveRenderer(mCaptureIndex);
mViECapture->StopCapture(mCaptureIndex);
#endif
return NS_OK;
}
@ -363,11 +413,39 @@ MediaEngineWebRTCVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
* return from this function after cleaning up the temporary stream object
* and calling Stop() on the media source.
*/
#ifdef MOZ_WIDGET_GONK
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
#endif
*aFile = nullptr;
if (!mInitDone || mState != kAllocated) {
return NS_ERROR_FAILURE;
}
#ifdef MOZ_WIDGET_GONK
mLastCapture = nullptr;
NS_DispatchToMainThread(WrapRunnable(this,
&MediaEngineWebRTCVideoSource::StartImpl,
mCapability));
mCallbackMonitor.Wait();
if (mState != kStarted) {
return NS_ERROR_FAILURE;
}
NS_DispatchToMainThread(WrapRunnable(this,
&MediaEngineWebRTCVideoSource::SnapshotImpl));
mCallbackMonitor.Wait();
if (mLastCapture == nullptr)
return NS_ERROR_FAILURE;
mState = kStopped;
NS_DispatchToMainThread(WrapRunnable(this,
&MediaEngineWebRTCVideoSource::StopImpl));
// The camera actually returns an nsDOMMemoryFile; the inheritance chain is:
// nsIDOMBlob <- nsIDOMFile <- nsDOMFileBase <- nsDOMFile <- nsDOMMemoryFile
*aFile = mLastCapture.get();
return NS_OK;
#else
{
MonitorAutoLock lock(mMonitor);
mInSnapshotMode = true;
@ -437,7 +515,7 @@ MediaEngineWebRTCVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
NS_ENSURE_SUCCESS(rv, rv);
NS_ADDREF(*aFile = new nsDOMFileFile(file));
#endif
return NS_OK;
}
@ -451,7 +529,17 @@ MediaEngineWebRTCVideoSource::Init()
{
mDeviceName[0] = '\0'; // paranoia
mUniqueId[0] = '\0';
#ifdef MOZ_WIDGET_GONK
nsCString deviceName;
mCameraManager->GetCameraName(mCaptureIndex, deviceName);
nsString deviceNameUTF16;
deviceNameUTF16.AssignASCII(deviceName.get());
char* UTF8Name = ToNewUTF8String(deviceNameUTF16);
memcpy(mDeviceName, UTF8Name, strlen(UTF8Name));
memcpy(mUniqueId, UTF8Name, strlen(UTF8Name));
NS_Free(UTF8Name);
#else
// fix compile warning for these being unused. (remove once used)
(void) mFps;
(void) mMinFps;
@ -479,6 +567,7 @@ MediaEngineWebRTCVideoSource::Init()
mUniqueId, sizeof(mUniqueId))) {
return;
}
#endif
mInitDone = true;
}
@ -490,7 +579,9 @@ MediaEngineWebRTCVideoSource::Shutdown()
if (!mInitDone) {
return;
}
#ifdef MOZ_WIDGET_GONK
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
#endif
if (mState == kStarted) {
while (!mSources.IsEmpty()) {
Stop(mSources[0], kVideoTrack); // XXX change to support multiple tracks
@ -501,12 +592,139 @@ MediaEngineWebRTCVideoSource::Shutdown()
if (mState == kAllocated || mState == kStopped) {
Deallocate();
}
#ifndef MOZ_WIDGET_GONK
mViECapture->Release();
mViERender->Release();
mViEBase->Release();
#endif
mState = kReleased;
mInitDone = false;
}
#ifdef MOZ_WIDGET_GONK
// All these functions must be run on MainThread!
void
MediaEngineWebRTCVideoSource::AllocImpl() {
MOZ_ASSERT(NS_IsMainThread());
mDOMCameraControl = new nsDOMCameraControl(mCaptureIndex,
mCameraThread,
this,
this,
mWindowId);
mCameraManager->Register(mDOMCameraControl);
}
void
MediaEngineWebRTCVideoSource::DeallocImpl() {
MOZ_ASSERT(NS_IsMainThread());
mNativeCameraControl->ReleaseHardware(this, this);
mNativeCameraControl = nullptr;
}
void
MediaEngineWebRTCVideoSource::StartImpl(webrtc::CaptureCapability aCapability) {
MOZ_ASSERT(NS_IsMainThread());
idl::CameraSize size;
size.width = aCapability.width;
size.height = aCapability.height;
mNativeCameraControl->GetPreviewStream(size, this, this);
}
void
MediaEngineWebRTCVideoSource::StopImpl() {
MOZ_ASSERT(NS_IsMainThread());
mNativeCameraControl->StopPreview();
mPreviewStream = nullptr;
}
void
MediaEngineWebRTCVideoSource::SnapshotImpl() {
MOZ_ASSERT(NS_IsMainThread());
idl::CameraSize size;
size.width = mCapability.width;
size.height = mCapability.height;
idl::CameraPosition cameraPosition;
cameraPosition.latitude = NAN;
cameraPosition.longitude = NAN;
cameraPosition.altitude = NAN;
cameraPosition.timestamp = NAN;
mNativeCameraControl->TakePicture(size, 0, NS_LITERAL_STRING("jpeg"), cameraPosition, PR_Now() / 1000000, this, this);
}
// nsICameraGetCameraCallback
nsresult
MediaEngineWebRTCVideoSource::HandleEvent(nsICameraControl* camera) {
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
mNativeCameraControl = static_cast<nsGonkCameraControl*>(mDOMCameraControl->GetNativeCameraControl().get());
mState = kAllocated;
mCallbackMonitor.Notify();
return NS_OK;
}
// nsICameraPreviewStreamCallback
nsresult
MediaEngineWebRTCVideoSource::HandleEvent(nsIDOMMediaStream* stream) {
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
mPreviewStream = static_cast<DOMCameraPreview*>(stream);
mPreviewStream->Start();
CameraPreviewMediaStream* cameraStream = static_cast<CameraPreviewMediaStream*>(mPreviewStream->GetStream());
cameraStream->SetFrameCallback(this);
mState = kStarted;
mCallbackMonitor.Notify();
return NS_OK;
}
// nsICameraTakePictureCallback
nsresult
MediaEngineWebRTCVideoSource::HandleEvent(nsIDOMBlob* picture) {
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
mLastCapture = static_cast<nsIDOMFile*>(picture);
mCallbackMonitor.Notify();
return NS_OK;
}
// nsICameraReleaseCallback
nsresult
MediaEngineWebRTCVideoSource::HandleEvent() {
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
mState = kReleased;
mCallbackMonitor.Notify();
return NS_OK;
}
// nsICameraErrorCallback
nsresult
MediaEngineWebRTCVideoSource::HandleEvent(const nsAString& error) {
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
mCallbackMonitor.Notify();
return NS_OK;
}
// Unlike the callbacks above, this one is called on the camera preview thread.
void
MediaEngineWebRTCVideoSource::OnNewFrame(const gfxIntSize& aIntrinsicSize, layers::Image* aImage) {
MonitorAutoLock enter(mMonitor);
mImage = aImage;
if (mWidth != aIntrinsicSize.width || mHeight != aIntrinsicSize.height) {
mWidth = aIntrinsicSize.width;
mHeight = aIntrinsicSize.height;
LOG(("Video FrameSizeChange: %ux%u", mWidth, mHeight));
}
}
#endif
}
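
All of the Gonk camera work above is funneled to the main thread: Allocate(), Deallocate(), Start(), Stop() and Snapshot() dispatch an *Impl runnable and then block on mCallbackMonitor until the matching HandleEvent callback flips mState and notifies. A self-contained sketch of that dispatch-and-wait shape using standard C++ primitives instead of Gecko's ReentrantMonitor and NS_DispatchToMainThread (names are illustrative only):

#include <condition_variable>
#include <functional>
#include <mutex>
#include <thread>

// Simplified two-state version of the kReleased/kAllocated state machine.
enum class State { Released, Allocated };

struct CallbackMonitor {
  std::mutex mutex;
  std::condition_variable cond;
  State state = State::Released;
};

// Post work to another thread, then block until the asynchronous callback
// changes the state and notifies, mirroring Allocate() above.
bool AllocateBlocking(CallbackMonitor& monitor,
                      const std::function<void(CallbackMonitor&)>& dispatchToMainThread) {
  std::unique_lock<std::mutex> lock(monitor.mutex);
  dispatchToMainThread(monitor);  // kicks off the asynchronous allocation
  monitor.cond.wait(lock, [&] { return monitor.state != State::Released; });
  return monitor.state == State::Allocated;  // mirrors "if (mState != kAllocated)"
}

int main() {
  CallbackMonitor monitor;
  std::thread mainThread;
  bool ok = AllocateBlocking(monitor, [&](CallbackMonitor& m) {
    // Simulated "main thread" doing the allocation, then notifying the waiter,
    // like HandleEvent(nsICameraControl*) does with mCallbackMonitor.Notify().
    mainThread = std::thread([&m] {
      std::lock_guard<std::mutex> lock(m.mutex);
      m.state = State::Allocated;
      m.cond.notify_one();
    });
  });
  mainThread.join();
  return ok ? 0 : 1;
}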

View File

@ -106,6 +106,10 @@ CameraPreviewMediaStream::SetCurrentFrame(const gfxIntSize& aIntrinsicSize, Imag
NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate);
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
}
if (mFrameCallback) {
mFrameCallback->OnNewFrame(aIntrinsicSize, aImage);
}
}
}

View File

@ -11,6 +11,11 @@
namespace mozilla {
class CameraPreviewFrameCallback {
public:
virtual void OnNewFrame(const gfxIntSize& aIntrinsicSize, layers::Image* aImage);
};
/**
* This is a stream for camera preview.
*
@ -42,11 +47,16 @@ public:
// Call these on any thread.
void SetCurrentFrame(const gfxIntSize& aIntrinsicSize, Image* aImage);
void SetFrameCallback(CameraPreviewFrameCallback* aCallback) {
mFrameCallback = aCallback;
}
protected:
// This class is not registered with MediaStreamGraph, so it has to protect
// all of its fields itself.
Mutex mMutex;
CameraPreviewFrameCallback* mFrameCallback;
};

View File

@ -455,3 +455,9 @@ nsDOMCameraControl::Shutdown()
DOM_CAMERA_LOGI("%s:%d\n", __func__, __LINE__);
mCameraControl->Shutdown();
}
nsRefPtr<ICameraControl>
nsDOMCameraControl::GetNativeCameraControl()
{
return mCameraControl;
}

View File

@ -17,7 +17,6 @@
namespace mozilla {
// Main camera control.
class nsDOMCameraControl : public nsICameraControl
{
@ -30,6 +29,7 @@ public:
nsICameraGetCameraCallback* onSuccess,
nsICameraErrorCallback* onError, uint64_t aWindowId);
nsresult Result(nsresult aResult, nsICameraGetCameraCallback* onSuccess, nsICameraErrorCallback* onError, uint64_t aWindowId);
nsRefPtr<ICameraControl> GetNativeCameraControl();
void Shutdown();

View File

@ -46,6 +46,9 @@ public:
void Register(mozilla::nsDOMCameraControl* aDOMCameraControl);
void OnNavigation(uint64_t aWindowId);
nsresult GetNumberOfCameras(int32_t& aDeviceCount);
nsresult GetCameraName(uint32_t aDeviceNum, nsCString& aDeviceName);
protected:
void XpComShutdown();
void Shutdown(uint64_t aWindowId);

View File

@ -5,6 +5,17 @@
#include "DOMCameraManager.h"
// From nsDOMCameraManager.
nsresult
nsDOMCameraManager::GetNumberOfCameras(int32_t& aDeviceCount)
{
return NS_ERROR_NOT_IMPLEMENTED;
};
nsresult
nsDOMCameraManager::GetCameraName(uint32_t aDeviceNum, nsCString& aDeviceName)
{
return NS_ERROR_NOT_IMPLEMENTED;
}
/* [implicit_jscontext] jsval getListOfCameras (); */
NS_IMETHODIMP

View File

@ -22,6 +22,46 @@
#include "CameraCommon.h"
// From nsDOMCameraManager, but gonk-specific!
nsresult
nsDOMCameraManager::GetNumberOfCameras(int32_t& aDeviceCount)
{
aDeviceCount = android::Camera::getNumberOfCameras();
return NS_OK;
}
nsresult
nsDOMCameraManager::GetCameraName(uint32_t aDeviceNum, nsCString& aDeviceName)
{
int32_t count = android::Camera::getNumberOfCameras();
DOM_CAMERA_LOGI("getListOfCameras : getNumberOfCameras() returned %d\n", count);
if (aDeviceNum > count) {
DOM_CAMERA_LOGE("GetCameraName : invalid device number");
return NS_ERROR_NOT_AVAILABLE;
}
android::CameraInfo info;
int rv = android::Camera::getCameraInfo(aDeviceNum, &info);
if (rv != 0) {
DOM_CAMERA_LOGE("GetCameraName : get_camera_info(%d) failed: %d\n", aDeviceNum, rv);
return NS_ERROR_NOT_AVAILABLE;
}
switch (info.facing) {
case CAMERA_FACING_BACK:
aDeviceName.Assign("back");
break;
case CAMERA_FACING_FRONT:
aDeviceName.Assign("front");
break;
default:
aDeviceName.Assign("extra-camera-");
aDeviceName.AppendInt(aDeviceNum);
break;
}
return NS_OK;
}
/* [implicit_jscontext] jsval getListOfCameras (); */
NS_IMETHODIMP
@ -40,40 +80,25 @@ nsDOMCameraManager::GetListOfCameras(JSContext* cx, JS::Value* _retval)
return NS_ERROR_NOT_AVAILABLE;
}
DOM_CAMERA_LOGI("getListOfCameras : get_number_of_cameras() returned %d\n", count);
DOM_CAMERA_LOGI("getListOfCameras : getNumberOfCameras() returned %d\n", count);
while (count--) {
nsCString cameraName;
nsresult result = GetCameraName(count, cameraName);
if (result != NS_OK) {
continue;
}
JSString* v = JS_NewStringCopyZ(cx, cameraName.get());
JS::Value jv;
if (!cameraName.Compare("back")) {
index = 0;
} else if (!cameraName.Compare("front")) {
index = 1;
} else {
static uint32_t extraIndex = 2;
index = extraIndex++;
}
if (!v) {
DOM_CAMERA_LOGE("getListOfCameras : out of memory populating camera list");
return NS_ERROR_NOT_AVAILABLE;

View File

@ -22,8 +22,10 @@ MODULE = 'dom'
EXPORTS += [
'CameraCommon.h',
'CameraPreviewMediaStream.h',
'DOMCameraManager.h',
'GonkNativeWindow.h',
'GonkNativeWindowClient.h',
'GonkCameraControl.h',
]

View File

@ -223,6 +223,7 @@ nsDOMIdentity.prototype = {
// Get an assertion by using our observer api: watch + request.
var self = this;
this.watch({
_internal: true,
oncancel: function get_oncancel() {
if (aCallback) {
aCallback(null);

View File

@ -529,7 +529,7 @@ public:
// Was a backend provided?
if (!mBackendChosen) {
mBackend = mManager->GetBackend(mWindowID);
}
// Was a device provided?
@ -772,10 +772,12 @@ class GetUserMediaDevicesRunnable : public nsRunnable
public:
GetUserMediaDevicesRunnable(
already_AddRefed<nsIGetUserMediaDevicesSuccessCallback> aSuccess,
already_AddRefed<nsIDOMGetUserMediaErrorCallback> aError,
uint64_t aWindowId)
: mSuccess(aSuccess)
, mError(aError)
, mManager(MediaManager::GetInstance())
, mWindowId(aWindowId)
{}
NS_IMETHOD
@ -786,11 +788,11 @@ public:
uint32_t audioCount, videoCount, i;
nsTArray<nsRefPtr<MediaEngineVideoSource> > videoSources;
mManager->GetBackend(mWindowId)->EnumerateVideoDevices(&videoSources);
videoCount = videoSources.Length();
nsTArray<nsRefPtr<MediaEngineAudioSource> > audioSources;
mManager->GetBackend(mWindowId)->EnumerateAudioDevices(&audioSources);
audioCount = audioSources.Length();
nsTArray<nsCOMPtr<nsIMediaDevice> > *devices =
@ -821,6 +823,7 @@ private:
already_AddRefed<nsIGetUserMediaDevicesSuccessCallback> mSuccess;
already_AddRefed<nsIDOMGetUserMediaErrorCallback> mError;
nsRefPtr<MediaManager> mManager;
uint64_t mWindowId;
};
MediaManager::MediaManager()
@ -1047,7 +1050,16 @@ MediaManager::GetUserMedia(bool aPrivileged, nsPIDOMWindow* aWindow,
);
}
#ifdef MOZ_WIDGET_GONK
if (mCameraManager == nullptr) {
mCameraManager = nsDOMCameraManager::CheckPermissionAndCreateInstance(aWindow);
if (!mCameraManager) {
aPrivileged = false;
}
}
#endif
#if defined(ANDROID) && !defined(MOZ_WIDGET_GONK)
if (picture) {
// ShowFilePickerForMimeType() must run on the Main Thread! (on Android)
NS_DispatchToMainThread(gUMRunnable);
@ -1113,7 +1125,7 @@ MediaManager::GetUserMediaDevices(nsPIDOMWindow* aWindow,
nsCOMPtr<nsIDOMGetUserMediaErrorCallback> onError(aOnError);
nsCOMPtr<nsIRunnable> gUMDRunnable = new GetUserMediaDevicesRunnable(
onSuccess.forget(), onError.forget(), aWindow->WindowID()
);
nsCOMPtr<nsIThread> deviceThread;
@ -1126,7 +1138,7 @@ MediaManager::GetUserMediaDevices(nsPIDOMWindow* aWindow,
}
MediaEngine*
MediaManager::GetBackend(uint64_t aWindowId)
{
// Plugin backends as appropriate. The default engine also currently
// includes picture support for Android.
@ -1134,7 +1146,11 @@ MediaManager::GetBackend()
MutexAutoLock lock(mMutex);
if (!mBackend) {
#if defined(MOZ_WEBRTC)
#ifndef MOZ_WIDGET_GONK
mBackend = new MediaEngineWebRTC();
#else
mBackend = new MediaEngineWebRTC(mCameraManager, aWindowId);
#endif
#else
mBackend = new MediaEngineDefault();
#endif
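
GetBackend() above creates the engine lazily under the manager's mutex, so only the first caller's window id ever reaches the Gonk MediaEngineWebRTC constructor. A simplified standalone sketch of that lazily constructed, mutex-guarded backend (hypothetical Backend type, standard primitives instead of Mozilla's Mutex):

#include <cstdint>
#include <memory>
#include <mutex>

// Hypothetical stand-in for MediaEngine / MediaEngineWebRTC.
struct Backend {
  explicit Backend(uint64_t windowId) : windowId(windowId) {}
  uint64_t windowId;
};

class Manager {
public:
  Backend* GetBackend(uint64_t windowId = 0) {
    std::lock_guard<std::mutex> lock(mMutex);  // mirrors MutexAutoLock lock(mMutex)
    if (!mBackend) {
      // Only the first caller's window id is baked into the backend, matching
      // how the Gonk MediaEngineWebRTC is constructed exactly once.
      mBackend.reset(new Backend(windowId));
    }
    return mBackend.get();
  }

private:
  std::mutex mMutex;
  std::unique_ptr<Backend> mBackend;
};

int main() {
  Manager manager;
  Backend* a = manager.GetBackend(42);  // first caller decides the window id
  Backend* b = manager.GetBackend(7);   // later callers get the same backend back
  return (a == b && a->windowId == 42) ? 0 : 1;
}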

View File

@ -27,6 +27,10 @@
#include "mtransport/runnable_utils.h"
#endif
#ifdef MOZ_WIDGET_GONK
#include "DOMCameraManager.h"
#endif
namespace mozilla {
#ifdef PR_LOGGING
@ -391,7 +395,7 @@ public:
NS_DECL_NSIOBSERVER
NS_DECL_NSIMEDIAMANAGERSERVICE
MediaEngine* GetBackend(uint64_t aWindowId = 0);
StreamListeners *GetWindowListeners(uint64_t aWindowId) {
NS_ASSERTION(NS_IsMainThread(), "Only access windowlist on main thread");
@ -449,6 +453,10 @@ private:
MediaEngine* mBackend;
static StaticRefPtr<MediaManager> sSingleton;
#ifdef MOZ_WIDGET_GONK
nsRefPtr<nsDOMCameraManager> mCameraManager;
#endif
};
} // namespace mozilla

View File

@ -13,7 +13,7 @@ dictionary MmsAttachment
nsIDOMBlob content;
};
[scriptable, builtinclass, uuid(e916d5c8-dbf4-4fdc-a463-793b67491863)]
interface nsIDOMMozMmsMessage : nsISupports
{
/**
@ -47,4 +47,8 @@ interface nsIDOMMozMmsMessage : nsISupports
[implicit_jscontext]
readonly attribute jsval attachments; // MmsAttachment[]
[implicit_jscontext]
readonly attribute jsval expiryDate; // Expiry date for manually
// downloading the multimedia message.
};

View File

@ -14,7 +14,7 @@ interface nsIDOMMozSmsSegmentInfo;
#define MOBILE_MESSAGE_SERVICE_CONTRACTID "@mozilla.org/mobilemessage/mobilemessageservice;1"
%}
[scriptable, builtinclass, uuid(4d3fec1d-56c8-46f9-9400-3a95e5534357)]
interface nsIMobileMessageService : nsISupports
{
[implicit_jscontext]
@ -40,7 +40,8 @@ interface nsIMobileMessageService : nsISupports
in boolean read,
in DOMString subject,
in DOMString smil,
in jsval attachments,
in jsval expiryDate);
nsIDOMMozSmsSegmentInfo createSmsSegmentInfo(in long segments,
in long charsPerSegment,

View File

@ -42,7 +42,8 @@ MmsMessage::MmsMessage(int32_t aId,
bool aRead,
const nsAString& aSubject,
const nsAString& aSmil,
const nsTArray<MmsAttachment>& aAttachments,
uint64_t aExpiryDate)
: mId(aId),
mThreadId(aThreadId),
mDelivery(aDelivery),
@ -53,7 +54,8 @@ MmsMessage::MmsMessage(int32_t aId,
mRead(aRead),
mSubject(aSubject),
mSmil(aSmil),
mAttachments(aAttachments),
mExpiryDate(aExpiryDate)
{
}
@ -67,6 +69,7 @@ MmsMessage::MmsMessage(const mobilemessage::MmsMessageData& aData)
, mRead(aData.read())
, mSubject(aData.subject())
, mSmil(aData.smil())
, mExpiryDate(aData.expiryDate())
{
uint32_t len = aData.attachments().Length();
mAttachments.SetCapacity(len);
@ -98,6 +101,7 @@ MmsMessage::Create(int32_t aId,
const nsAString& aSubject,
const nsAString& aSmil,
const JS::Value& aAttachments,
const JS::Value& aExpiryDate,
JSContext* aCx,
nsIDOMMozMmsMessage** aMessage)
{
@ -226,6 +230,25 @@ MmsMessage::Create(int32_t aId,
attachments.AppendElement(attachment);
}
// Set |expiryDate|.
uint64_t expiryDate;
if (aExpiryDate.isObject()) {
JSObject* expiryDateObj = &aExpiryDate.toObject();
if (!JS_ObjectIsDate(aCx, expiryDateObj)) {
return NS_ERROR_INVALID_ARG;
}
expiryDate = js_DateGetMsecSinceEpoch(expiryDateObj);
} else {
if (!aExpiryDate.isNumber()) {
return NS_ERROR_INVALID_ARG;
}
double number = aExpiryDate.toNumber();
if (static_cast<uint64_t>(number) != number) {
return NS_ERROR_INVALID_ARG;
}
expiryDate = static_cast<uint64_t>(number);
}
nsCOMPtr<nsIDOMMozMmsMessage> message = new MmsMessage(aId,
aThreadId,
delivery,
@ -236,7 +259,8 @@ MmsMessage::Create(int32_t aId,
aRead,
aSubject,
aSmil,
attachments,
expiryDate);
message.forget(aMessage);
return NS_OK;
}
@ -256,6 +280,7 @@ MmsMessage::GetData(ContentParent* aParent,
aData.read() = mRead;
aData.subject() = mSubject;
aData.smil() = mSmil;
aData.expiryDate() = mExpiryDate;
aData.attachments().SetCapacity(mAttachments.Length());
for (uint32_t i = 0; i < mAttachments.Length(); i++) {
@ -487,5 +512,15 @@ MmsMessage::GetAttachments(JSContext* aCx, JS::Value* aAttachments)
return NS_OK;
}
NS_IMETHODIMP
MmsMessage::GetExpiryDate(JSContext* cx, JS::Value* aDate)
{
JSObject *obj = JS_NewDateObjectMsec(cx, mExpiryDate);
NS_ENSURE_TRUE(obj, NS_ERROR_FAILURE);
*aDate = OBJECT_TO_JSVAL(obj);
return NS_OK;
}
} // namespace dom
} // namespace mozilla
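
MmsMessage::Create above accepts expiryDate either as a JS Date object or as a plain millisecond count, and rejects numbers that do not survive the round trip through uint64_t. A hedged standalone sketch of just the numeric branch (hypothetical helper name; the Date branch in the real code goes through js_DateGetMsecSinceEpoch):

#include <cmath>
#include <cstdint>

// A JS number is accepted only if it round-trips through uint64_t unchanged,
// which rejects NaN/Infinity, negative values and non-integral timestamps.
bool CoerceToMsecSinceEpoch(double number, uint64_t* outMsec) {
  if (!std::isfinite(number) || number < 0 ||
      number >= 18446744073709551616.0 /* 2^64 */) {
    return false;
  }
  uint64_t asInt = static_cast<uint64_t>(number);
  if (static_cast<double>(asInt) != number) {
    return false;  // lossy conversion: reject, mirroring NS_ERROR_INVALID_ARG
  }
  *outMsec = asInt;
  return true;
}

int main() {
  uint64_t msec = 0;
  bool accepted = CoerceToMsecSinceEpoch(1367500000000.0, &msec);  // a plausible 2013 timestamp
  bool rejected = !CoerceToMsecSinceEpoch(-1.0, &msec) &&
                  !CoerceToMsecSinceEpoch(1.5, &msec);
  return (accepted && rejected) ? 0 : 1;
}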

View File

@ -38,7 +38,8 @@ public:
bool aRead,
const nsAString& aSubject,
const nsAString& aSmil,
const nsTArray<idl::MmsAttachment>& aAttachments,
uint64_t aExpiryDate);
MmsMessage(const mobilemessage::MmsMessageData& aData);
@ -53,6 +54,7 @@ public:
const nsAString& aSubject,
const nsAString& aSmil,
const JS::Value& aAttachments,
const JS::Value& aExpiryDate,
JSContext* aCx,
nsIDOMMozMmsMessage** aMessage);
@ -72,6 +74,7 @@ private:
nsString mSubject;
nsString mSmil;
nsTArray<idl::MmsAttachment> mAttachments;
uint64_t mExpiryDate;
};
} // namespace dom

View File

@ -69,6 +69,7 @@ MobileMessageService::CreateMmsMessage(int32_t aId,
const nsAString& aSubject,
const nsAString& aSmil,
const JS::Value& aAttachments,
const JS::Value& aExpiryDate,
JSContext* aCx,
nsIDOMMozMmsMessage** aMessage)
{
@ -83,6 +84,7 @@ MobileMessageService::CreateMmsMessage(int32_t aId,
aSubject,
aSmil,
aAttachments,
aExpiryDate,
aCx,
aMessage);
}

View File

@ -56,6 +56,7 @@ struct MmsMessageData
nsString subject;
nsString smil;
MmsAttachmentData[] attachments;
uint64_t expiryDate;
};
union MobileMessageData

View File

@ -670,6 +670,7 @@ MobileMessageDatabaseService.prototype = {
});
}
}
let expiryDate = aMessageRecord.timestamp + headers["x-mms-expiry"] * 1000;
return gMobileMessageService.createMmsMessage(aMessageRecord.id,
aMessageRecord.threadId,
aMessageRecord.delivery,
@ -680,7 +681,8 @@ MobileMessageDatabaseService.prototype = {
aMessageRecord.read,
subject,
smil,
attachments,
expiryDate);
}
},

View File

@ -543,11 +543,18 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
return kMediaConduitMalformedArgument;
}
webrtc::RawVideoType type;
switch (video_type) {
case kVideoI420:
type = webrtc::kVideoI420;
break;
case kVideoNV21:
type = webrtc::kVideoNV21;
break;
default:
CSFLogError(logTag, "%s VideoType Invalid. Only 1420 and NV21 Supported",__FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
//Transmission should be enabled before we insert any frames.
if(!mEngineTransmitting)
@ -560,7 +567,7 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
if(mPtrExtCapture->IncomingFrame(video_frame,
video_frame_length,
width, height,
type,
(unsigned long long)capture_time) == -1)
{
CSFLogError(logTag, "%s IncomingFrame Failed %d ", __FUNCTION__,

View File

@ -22,6 +22,9 @@
#include "ImageTypes.h"
#include "ImageContainer.h"
#include "VideoUtils.h"
#ifdef MOZ_WIDGET_GONK
#include "GonkIOSurfaceImage.h"
#endif
#endif
#include "logging.h"
@ -789,46 +792,61 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
}
ImageFormat format = img->GetFormat();
#ifdef MOZ_WIDGET_GONK
if (format == GONK_IO_SURFACE) {
layers::GonkIOSurfaceImage *nativeImage = static_cast<layers::GonkIOSurfaceImage*>(img);
layers::SurfaceDescriptor handle = nativeImage->GetSurfaceDescriptor();
layers::SurfaceDescriptorGralloc grallocHandle = handle.get_SurfaceDescriptorGralloc();
android::sp<android::GraphicBuffer> graphicBuffer = layers::GrallocBufferActor::GetFrom(grallocHandle);
void *basePtr;
graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &basePtr);
conduit->SendVideoFrame(static_cast<unsigned char*>(basePtr),
(graphicBuffer->getWidth() * graphicBuffer->getHeight() * 3) / 2,
graphicBuffer->getWidth(),
graphicBuffer->getHeight(),
mozilla::kVideoNV21, 0);
graphicBuffer->unlock();
} else
#endif
if (format == PLANAR_YCBCR) {
// Cast away constness b/c some of the accessors are non-const
layers::PlanarYCbCrImage* yuv =
const_cast<layers::PlanarYCbCrImage *>(
static_cast<const layers::PlanarYCbCrImage *>(img));
// Big-time assumption here that this is all contiguous data coming
// from getUserMedia or other sources.
const layers::PlanarYCbCrImage::Data *data = yuv->GetData();
uint8_t *y = data->mYChannel;
#ifdef DEBUG
uint8_t *cb = data->mCbChannel;
uint8_t *cr = data->mCrChannel;
#endif
uint32_t width = yuv->GetSize().width;
uint32_t height = yuv->GetSize().height;
uint32_t length = yuv->GetDataSize();
// SendVideoFrame only supports contiguous YCrCb 4:2:0 buffers
// Verify it's contiguous and in the right order
MOZ_ASSERT(cb == (y + width*height) &&
cr == (cb + width*height/4));
// XXX Consider making this a non-debug-only check if we ever implement
// any subclasses of PlanarYCbCrImage that allow disjoint buffers such
// that y+3(width*height)/2 might go outside the allocation.
// GrallocPlanarYCbCrImage can have wider strides, and so in some cases
// would encode as garbage. If we need to encode it we'll either want to
// modify SendVideoFrame or copy/move the data in the buffer.
// OK, pass it on to the conduit
MOZ_MTLOG(PR_LOG_DEBUG, "Sending a video frame");
// Not much for us to do with an error
conduit->SendVideoFrame(y, length, width, height, mozilla::kVideoI420, 0);
} else {
MOZ_MTLOG(PR_LOG_ERROR, "Unsupported video format");
MOZ_ASSERT(PR_FALSE);
return;
}
}
#endif
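
The MOZ_ASSERT in the planar path above assumes a tightly packed I420 frame: Cb starts at y + width*height and Cr at cb + width*height/4, and the Gonk gralloc path sizes its NV21 buffer with the same 3/2 * width * height formula. A small arithmetic sketch of that layout (standalone C++, illustrative helper only):

#include <cassert>
#include <cstddef>
#include <cstdint>

// For a tightly packed I420 (YCbCr 4:2:0) buffer, Cb follows the Y plane and
// Cr follows Cb, so the whole frame occupies 3/2 * width * height bytes.
struct I420Layout {
  size_t yOffset, cbOffset, crOffset, totalBytes;
};

I420Layout ComputeI420Layout(uint32_t width, uint32_t height) {
  size_t ySize = static_cast<size_t>(width) * height;  // full-resolution luma plane
  size_t cSize = ySize / 4;                            // each chroma plane is subsampled 2x2
  return { 0, ySize, ySize + cSize, ySize + 2 * cSize };
}

int main() {
  // 640x480: Y = 307200 bytes, Cb = Cr = 76800 bytes, total = 460800 = 3/2 * 640 * 480.
  I420Layout layout = ComputeI420Layout(640, 480);
  assert(layout.cbOffset == 307200);
  assert(layout.crOffset == 307200 + 76800);
  assert(layout.totalBytes == 460800);
  return 0;
}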

View File

@ -42,6 +42,7 @@ NO_MAKEFILE_RULE = 1
NO_SUBMAKEFILES_RULE = 1
include $(topsrcdir)/config/rules.mk
include $(topsrcdir)/ipc/chromium/chromium-config.mk
include %(common_mk_path)s
"""

View File

@ -42,7 +42,8 @@ var OfflineApps = {
}
}];
let message = strings.formatStringFromName("offlineApps.ask", [host], 1);
let requestor = chromeWin.BrowserApp.manifest ? "'" + chromeWin.BrowserApp.manifest.name + "'" : host;
let message = strings.formatStringFromName("offlineApps.ask", [requestor], 1);
let options = { checkbox: Strings.browser.GetStringFromName("offlineApps.dontAskAgain") };
NativeWindow.doorhanger.show(message, notificationID, buttons, tab.id, options);
},

View File

@ -68,13 +68,15 @@ let WebAppRT = {
// If so, get the launchUrl from the manifest and we'll launch with that
//let app = DOMApplicationRegistry.getAppByManifestURL(aUrl);
if (app.manifestURL == aUrl) {
BrowserApp.manifest = app.manifest;
BrowserApp.manifestUrl = aUrl;
aCallback(manifest.fullLaunchPath());
return;
}
// Otherwise, see if the app's launch path is this url
if (manifest.fullLaunchPath() == aUrl) {
BrowserApp.manifest = app.manifest;
BrowserApp.manifestUrl = app.manifestURL;
aCallback(aUrl);
return;

View File

@ -84,8 +84,9 @@ var WebrtcUI = {
return;
let host = aBrowser.contentDocument.documentURIObject.asciiHost;
let requestor = chromeWin.BrowserApp.manifest ? "'" + chromeWin.BrowserApp.manifest.name + "'" : host;
let stringBundle = Services.strings.createBundle("chrome://browser/locale/browser.properties");
let message = stringBundle.formatStringFromName("getUserMedia.share" + requestType + ".message", [ host ], 1);
let message = stringBundle.formatStringFromName("getUserMedia.share" + requestType + ".message", [ requestor ], 1);
if (audioDevices.length) {
let buttons = this.getDeviceButtons(audioDevices, aCallID, stringBundle);

View File

@ -74,8 +74,7 @@ ContentPermissionPrompt.prototype = {
} else if (entityName == "desktopNotification") {
// For notifications, it doesn't make sense to grant permission once. So when the user clicks allow,
// we let the requestor create notifications for the session.
Services.perms.addFromPrincipal(request.principal, request.type, Ci.nsIPermissionManager.ALLOW_ACTION, Ci.nsIPermissionManager.EXPIRE_SESSION);
}
request.allow();
@ -92,13 +91,11 @@ ContentPermissionPrompt.prototype = {
}
}];
let message = browserBundle.formatStringFromName(entityName + ".ask",
[request.principal.URI.host], 1);
let requestor = chromeWin.BrowserApp.manifest ? "'" + chromeWin.BrowserApp.manifest.name + "'" : request.principal.URI.host;
let message = browserBundle.formatStringFromName(entityName + ".ask", [requestor], 1);
let options = { checkbox: browserBundle.GetStringFromName(entityName + ".dontAskAgain") };
chromeWin.NativeWindow.doorhanger.show(message, entityName + request.principal.URI.host, buttons, tab.id, options);
}
};