/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
2015-07-05 20:36:15 -07:00
|
|
|
#include "AudioSegment.h"
|
2015-08-16 16:52:28 -07:00
|
|
|
#include "DecodedStream.h"
|
2015-07-05 20:36:15 -07:00
|
|
|
#include "MediaData.h"
|
2015-08-16 16:52:28 -07:00
|
|
|
#include "MediaQueue.h"
|
|
|
|
#include "MediaStreamGraph.h"
|
2015-07-05 20:36:15 -07:00
|
|
|
#include "SharedBuffer.h"
|
2015-08-16 16:52:28 -07:00
|
|
|
#include "VideoSegment.h"
|
2015-07-05 20:36:15 -07:00
|
|
|
#include "VideoUtils.h"
|
2015-05-09 20:38:15 -07:00
|
|
|
|
|
|
|
namespace mozilla {
|
|
|
|
|
|
|
|
class DecodedStreamGraphListener : public MediaStreamListener {
|
|
|
|
typedef MediaStreamListener::MediaStreamGraphEvent MediaStreamGraphEvent;
|
|
|
|
public:
|
2015-08-16 16:52:42 -07:00
|
|
|
DecodedStreamGraphListener(MediaStream* aStream,
|
|
|
|
MozPromiseHolder<GenericPromise>&& aPromise)
|
2015-05-09 21:07:14 -07:00
|
|
|
: mMutex("DecodedStreamGraphListener::mMutex")
|
2015-05-09 20:38:15 -07:00
|
|
|
, mStream(aStream)
|
|
|
|
, mLastOutputTime(aStream->StreamTimeToMicroseconds(aStream->GetCurrentTime()))
|
2015-08-16 16:52:42 -07:00
|
|
|
, mStreamFinishedOnMainThread(false)
|
|
|
|
{
|
|
|
|
mFinishPromise = Move(aPromise);
|
|
|
|
}
|
2015-05-09 20:38:15 -07:00
|
|
|
|
|
|
|
void NotifyOutput(MediaStreamGraph* aGraph, GraphTime aCurrentTime) override
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
if (mStream) {
|
|
|
|
mLastOutputTime = mStream->StreamTimeToMicroseconds(mStream->GraphTimeToStreamTime(aCurrentTime));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void NotifyEvent(MediaStreamGraph* aGraph, MediaStreamGraphEvent event) override
|
|
|
|
{
|
|
|
|
if (event == EVENT_FINISHED) {
|
|
|
|
nsCOMPtr<nsIRunnable> event =
|
|
|
|
NS_NewRunnableMethod(this, &DecodedStreamGraphListener::DoNotifyFinished);
|
|
|
|
aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void DoNotifyFinished()
|
|
|
|
{
|
2015-08-16 16:52:42 -07:00
|
|
|
mFinishPromise.ResolveIfExists(true, __func__);
|
2015-05-09 20:38:15 -07:00
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mStreamFinishedOnMainThread = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
int64_t GetLastOutputTime()
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
return mLastOutputTime;
|
|
|
|
}
|
|
|
|
|
|
|
|
void Forget()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
2015-08-16 16:52:42 -07:00
|
|
|
mFinishPromise.ResolveIfExists(true, __func__);
|
2015-05-09 20:38:15 -07:00
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mStream = nullptr;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool IsFinishedOnMainThread()
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
return mStreamFinishedOnMainThread;
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
Mutex mMutex;
|
|
|
|
// Members below are protected by mMutex.
|
|
|
|
nsRefPtr<MediaStream> mStream;
|
|
|
|
int64_t mLastOutputTime; // microseconds
|
|
|
|
bool mStreamFinishedOnMainThread;
|
2015-08-16 16:52:42 -07:00
|
|
|
// Main thread only.
|
|
|
|
MozPromiseHolder<GenericPromise> mFinishPromise;
|
2015-05-09 20:38:15 -07:00
|
|
|
};
|
|
|
|
|
2015-06-08 01:51:39 -07:00
|
|
|
static void
|
|
|
|
UpdateStreamBlocking(MediaStream* aStream, bool aBlocking)
|
|
|
|
{
|
|
|
|
int32_t delta = aBlocking ? 1 : -1;
|
|
|
|
if (NS_IsMainThread()) {
|
|
|
|
aStream->ChangeExplicitBlockerCount(delta);
|
|
|
|
} else {
|
|
|
|
nsCOMPtr<nsIRunnable> r = NS_NewRunnableMethodWithArg<int32_t>(
|
|
|
|
aStream, &MediaStream::ChangeExplicitBlockerCount, delta);
|
|
|
|
AbstractThread::MainThread()->Dispatch(r.forget());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-07-19 06:25:08 -07:00
|
|
|
/*
 * All MediaStream-related data is protected by the decoder's monitor.
 * We have at most one DecodedStreamData per MediaDecoder. Its stream
 * is used as the input for each ProcessedMediaStream created by calls to
 * captureStream(UntilEnded). Seeking creates a new source stream, as does
 * replaying after the input has ended. In the latter case, the new source is
 * not connected to streams created by captureStreamUntilEnded.
 */
class DecodedStreamData {
public:
  DecodedStreamData(SourceMediaStream* aStream,
                    MozPromiseHolder<GenericPromise>&& aPromise);
  ~DecodedStreamData();
  bool IsFinished() const;
  int64_t GetPosition() const;
  void SetPlaying(bool aPlaying);

  /* The following group of fields are protected by the decoder's monitor
   * and can be read or written on any thread.
   */
  // Count of audio frames written to the stream
  int64_t mAudioFramesWritten;
  // mNextVideoTime is the end timestamp for the last packet sent to the stream.
  // Therefore video packets starting at or after this time need to be copied
  // to the output stream.
  int64_t mNextVideoTime; // microseconds
  int64_t mNextAudioTime; // microseconds
  // The last video image sent to the stream. Useful if we need to replicate
  // the image.
  nsRefPtr<layers::Image> mLastVideoImage;
  gfx::IntSize mLastVideoImageDisplaySize;
  // This is set to true when the stream is initialized (audio and
  // video tracks added).
  bool mStreamInitialized;
  bool mHaveSentFinish;
  bool mHaveSentFinishAudio;
  bool mHaveSentFinishVideo;

  // The decoder is responsible for calling Destroy() on this stream.
  const nsRefPtr<SourceMediaStream> mStream;
  // Resolves the finish promise and tracks last output time; see
  // DecodedStreamGraphListener.
  nsRefPtr<DecodedStreamGraphListener> mListener;
  bool mPlaying;
  // True if we need to send a compensation video frame to ensure the
  // StreamTime going forward.
  bool mEOSVideoCompensation;
};
|
|
|
|
|
2015-08-24 06:05:22 -07:00
|
|
|
// Wraps aStream and installs a DecodedStreamGraphListener on it. aPromise is
// resolved by the listener when the stream finishes playback.
DecodedStreamData::DecodedStreamData(SourceMediaStream* aStream,
                                     MozPromiseHolder<GenericPromise>&& aPromise)
  : mAudioFramesWritten(0)
  , mNextVideoTime(-1)
  , mNextAudioTime(-1)
  , mStreamInitialized(false)
  , mHaveSentFinish(false)
  , mHaveSentFinishAudio(false)
  , mHaveSentFinishVideo(false)
  , mStream(aStream)
  , mPlaying(true)
  , mEOSVideoCompensation(false)
{
  // DecodedStreamGraphListener will resolve this promise.
  mListener = new DecodedStreamGraphListener(mStream, Move(aPromise));
  mStream->AddListener(mListener);

  // mPlaying is initially true because MDSM won't start playback until playing
  // becomes true. This is consistent with the settings of AudioSink.
}
|
|
|
|
|
|
|
|
// Detach the listener (resolving its finish promise if pending) and destroy
// the underlying source stream. Forget() asserts main thread, so this
// destructor must run on the main thread.
DecodedStreamData::~DecodedStreamData()
{
  mListener->Forget();
  mStream->Destroy();
}
|
|
|
|
|
|
|
|
// True once the graph has notified (on the main thread) that the stream
// finished.
bool
DecodedStreamData::IsFinished() const
{
  return mListener->IsFinishedOnMainThread();
}
|
|
|
|
|
|
|
|
// Last output position reported by the graph, in microseconds.
int64_t
DecodedStreamData::GetPosition() const
{
  return mListener->GetLastOutputTime();
}
|
|
|
|
|
2015-06-08 01:51:39 -07:00
|
|
|
void
|
|
|
|
DecodedStreamData::SetPlaying(bool aPlaying)
|
|
|
|
{
|
|
|
|
if (mPlaying != aPlaying) {
|
|
|
|
mPlaying = aPlaying;
|
|
|
|
UpdateStreamBlocking(mStream, !mPlaying);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-05-09 20:38:15 -07:00
|
|
|
class OutputStreamListener : public MediaStreamListener {
|
|
|
|
typedef MediaStreamListener::MediaStreamGraphEvent MediaStreamGraphEvent;
|
|
|
|
public:
|
2015-08-17 19:58:13 -07:00
|
|
|
explicit OutputStreamListener(OutputStreamData* aOwner) : mOwner(aOwner) {}
|
2015-05-09 20:38:15 -07:00
|
|
|
|
|
|
|
void NotifyEvent(MediaStreamGraph* aGraph, MediaStreamGraphEvent event) override
|
|
|
|
{
|
|
|
|
if (event == EVENT_FINISHED) {
|
|
|
|
nsCOMPtr<nsIRunnable> r = NS_NewRunnableMethod(
|
|
|
|
this, &OutputStreamListener::DoNotifyFinished);
|
|
|
|
aGraph->DispatchToMainThreadAfterStreamStateUpdate(r.forget());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void Forget()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
2015-08-17 19:57:56 -07:00
|
|
|
mOwner = nullptr;
|
2015-05-09 20:38:15 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
void DoNotifyFinished()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
2015-08-17 19:57:56 -07:00
|
|
|
if (mOwner) {
|
2015-07-05 20:34:47 -07:00
|
|
|
// Remove the finished stream so it won't block the decoded stream.
|
2015-08-17 19:57:56 -07:00
|
|
|
mOwner->Remove();
|
2015-05-09 20:38:15 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Main thread only
|
2015-08-17 19:57:56 -07:00
|
|
|
OutputStreamData* mOwner;
|
2015-05-09 20:38:15 -07:00
|
|
|
};
|
|
|
|
|
|
|
|
OutputStreamData::~OutputStreamData()
{
  MOZ_ASSERT(NS_IsMainThread());
  // Stop the listener from calling back into this (soon-dead) object.
  mListener->Forget();
  // Break the connection to the input stream if necessary.
  if (mPort) {
    mPort->Destroy();
  }
}
|
|
|
|
|
|
|
|
// Associate this entry with its owning manager and the captured stream, and
// install a finish listener on the stream.
void
OutputStreamData::Init(OutputStreamManager* aOwner, ProcessedMediaStream* aStream)
{
  mOwner = aOwner;
  mStream = aStream;
  // Listener must exist before registration below.
  mListener = new OutputStreamListener(this);
  aStream->AddListener(mListener);
}
|
|
|
|
|
2015-08-17 19:57:56 -07:00
|
|
|
// Connect this output stream to the input (decoded) stream aStream via a new
// input port. Must not already be connected and the output stream must still
// be alive.
void
OutputStreamData::Connect(MediaStream* aStream)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(!mPort, "Already connected?");
  MOZ_ASSERT(!mStream->IsDestroyed(), "Can't connect a destroyed stream.");

  // The output stream must stay in sync with the input stream, so if
  // either stream is blocked, we block the other.
  mPort = mStream->AllocateInputPort(aStream,
    MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT);
  // Unblock the output stream now. The input stream is responsible for
  // controlling blocking from now on.
  mStream->ChangeExplicitBlockerCount(-1);
}
|
|
|
|
|
|
|
|
// Disconnect this output stream from its input. Returns false if the output
// stream was already destroyed (e.g. by cycle collection), in which case the
// caller should discard this entry.
bool
OutputStreamData::Disconnect()
{
  MOZ_ASSERT(NS_IsMainThread());

  // During cycle collection, DOMMediaStream can be destroyed and send
  // its Destroy message before this decoder is destroyed. So we have to
  // be careful not to send any messages after the Destroy().
  if (mStream->IsDestroyed()) {
    return false;
  }

  // Disconnect the existing port if necessary.
  if (mPort) {
    mPort->Destroy();
    mPort = nullptr;
  }
  // Block the stream again. It will be unlocked when connecting
  // to the input stream.
  mStream->ChangeExplicitBlockerCount(1);
  return true;
}
|
|
|
|
|
|
|
|
// Ask the owning manager to drop this entry (and thus destroy it). Called by
// OutputStreamListener when the stream finishes.
void
OutputStreamData::Remove()
{
  MOZ_ASSERT(NS_IsMainThread());
  mOwner->Remove(mStream);
}
|
|
|
|
|
2015-08-19 22:02:31 -07:00
|
|
|
// The graph this output stream belongs to.
MediaStreamGraph*
OutputStreamData::Graph() const
{
  return mStream->Graph();
}
|
|
|
|
|
2015-08-17 19:58:13 -07:00
|
|
|
// Register a new captured output stream. If aFinishWhenEnded, the stream is
// auto-finished when the decoded (input) stream ends.
void
OutputStreamManager::Add(ProcessedMediaStream* aStream, bool aFinishWhenEnded)
{
  MOZ_ASSERT(NS_IsMainThread());
  // All streams must belong to the same graph.
  MOZ_ASSERT(!Graph() || Graph() == aStream->Graph());

  // Ensure that aStream finishes the moment mDecodedStream does.
  if (aFinishWhenEnded) {
    aStream->SetAutofinish(true);
  }

  OutputStreamData* p = mStreams.AppendElement();
  p->Init(this, aStream);

  // Connect to the input stream if we have one. Otherwise the output stream
  // will be connected in Connect().
  if (mInputStream) {
    p->Connect(mInputStream);
  }
}
|
|
|
|
|
|
|
|
void
|
|
|
|
OutputStreamManager::Remove(MediaStream* aStream)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
for (int32_t i = mStreams.Length() - 1; i >= 0; --i) {
|
|
|
|
if (mStreams[i].Equals(aStream)) {
|
|
|
|
mStreams.RemoveElementAt(i);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
OutputStreamManager::Connect(MediaStream* aStream)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
mInputStream = aStream;
|
|
|
|
for (auto&& os : mStreams) {
|
|
|
|
os.Connect(aStream);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
OutputStreamManager::Disconnect()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
mInputStream = nullptr;
|
|
|
|
for (int32_t i = mStreams.Length() - 1; i >= 0; --i) {
|
|
|
|
if (!mStreams[i].Disconnect()) {
|
|
|
|
// Probably the DOMMediaStream was GCed. Clean up.
|
|
|
|
mStreams.RemoveElementAt(i);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-08-23 18:55:30 -07:00
|
|
|
// aOwnerThread is the worker ("owner") thread on which most DecodedStream
// methods must run; the audio/video queues are shared with the state machine.
DecodedStream::DecodedStream(AbstractThread* aOwnerThread,
                             MediaQueue<MediaData>& aAudioQueue,
                             MediaQueue<MediaData>& aVideoQueue)
  : mOwnerThread(aOwnerThread)
  , mShuttingDown(false)
  , mPlaying(false)
  , mVolume(1.0)
  , mAudioQueue(aAudioQueue)
  , mVideoQueue(aVideoQueue)
{
}
|
|
|
|
|
|
|
|
DecodedStream::~DecodedStream()
{
  // StopPlayback() must have run (it resets mStartTime) before destruction.
  MOZ_ASSERT(mStartTime.isNothing(), "playback should've ended.");
}
|
|
|
|
|
2015-08-24 06:05:22 -07:00
|
|
|
// Flag the beginning of shutdown; CreateData() checks this to avoid creating
// new source streams afterwards.
void
DecodedStream::Shutdown()
{
  MOZ_ASSERT(NS_IsMainThread());
  mShuttingDown = true;
}
|
|
|
|
|
2015-08-16 16:52:28 -07:00
|
|
|
// Begin a playback session starting at aStartTime with the given media info.
// Returns a promise resolved when stream playback finishes (or is aborted).
// The actual DecodedStreamData is created asynchronously on the main thread.
nsRefPtr<GenericPromise>
DecodedStream::StartPlayback(int64_t aStartTime, const MediaInfo& aInfo)
{
  AssertOwnerThread();
  MOZ_ASSERT(mStartTime.isNothing(), "playback already started.");

  mStartTime.emplace(aStartTime);
  mInfo = aInfo;
  ConnectListener();

  // Runnable that carries the promise holder by move to CreateData() on the
  // main thread (promise holders are move-only).
  class R : public nsRunnable {
    typedef MozPromiseHolder<GenericPromise> Promise;
    typedef void(DecodedStream::*Method)(Promise&&);
  public:
    R(DecodedStream* aThis, Method aMethod, Promise&& aPromise)
      : mThis(aThis), mMethod(aMethod)
    {
      mPromise = Move(aPromise);
    }
    NS_IMETHOD Run() override
    {
      (mThis->*mMethod)(Move(mPromise));
      return NS_OK;
    }
  private:
    nsRefPtr<DecodedStream> mThis;
    Method mMethod;
    Promise mPromise;
  };

  MozPromiseHolder<GenericPromise> promise;
  nsRefPtr<GenericPromise> rv = promise.Ensure(__func__);
  nsCOMPtr<nsIRunnable> r = new R(this, &DecodedStream::CreateData, Move(promise));
  AbstractThread::MainThread()->Dispatch(r.forget());

  return rv.forget();
}
|
|
|
|
|
|
|
|
// End the current playback session (if any): reset the start time, detach
// queue listeners, and tear down the stream data.
void DecodedStream::StopPlayback()
{
  AssertOwnerThread();

  // Playback didn't even start at all.
  if (mStartTime.isNothing()) {
    return;
  }

  mStartTime.reset();
  DisconnectListener();

  // Clear mData immediately when this playback session ends so we won't
  // send data to the wrong stream in SendData() in next playback session.
  DestroyData(Move(mData));
}
|
|
|
|
|
|
|
|
// Destroy aData on the main thread. ~DecodedStreamData calls
// mListener->Forget() (main-thread-only) and mStream->Destroy(), so deletion
// must not happen on the owner thread.
void
DecodedStream::DestroyData(UniquePtr<DecodedStreamData> aData)
{
  AssertOwnerThread();

  if (!aData) {
    return;
  }

  // Transfer raw ownership into the lambda; it is deleted on the main thread.
  DecodedStreamData* data = aData.release();
  nsRefPtr<DecodedStream> self = this;
  nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([=] () {
    self->mOutputStreamManager.Disconnect();
    delete data;
  });
  AbstractThread::MainThread()->Dispatch(r.forget());
}
|
|
|
|
|
|
|
|
// Main thread: create the DecodedStreamData (source stream + graph listener)
// for the current playback session, connect it to the registered output
// streams, then hand ownership to the owner thread via OnDataCreated().
void
DecodedStream::CreateData(MozPromiseHolder<GenericPromise>&& aPromise)
{
  MOZ_ASSERT(NS_IsMainThread());

  // No need to create a source stream when there are no output streams. This
  // happens when RemoveOutput() is called immediately after StartPlayback().
  // Also we don't create a source stream when MDSM has begun shutdown.
  if (!mOutputStreamManager.Graph() || mShuttingDown) {
    // Resolve the promise to indicate the end of playback.
    aPromise.Resolve(true, __func__);
    return;
  }

  auto source = mOutputStreamManager.Graph()->CreateSourceStream(nullptr);
  auto data = new DecodedStreamData(source, Move(aPromise));
  mOutputStreamManager.Connect(data->mStream);

  // Runnable that carries the raw DecodedStreamData pointer (wrapped in a
  // UniquePtr member) to the owner thread.
  class R : public nsRunnable {
    typedef void(DecodedStream::*Method)(UniquePtr<DecodedStreamData>);
  public:
    R(DecodedStream* aThis, Method aMethod, DecodedStreamData* aData)
      : mThis(aThis), mMethod(aMethod), mData(aData) {}
    NS_IMETHOD Run() override
    {
      (mThis->*mMethod)(Move(mData));
      return NS_OK;
    }
  private:
    nsRefPtr<DecodedStream> mThis;
    Method mMethod;
    UniquePtr<DecodedStreamData> mData;
  };

  // Post a message to ensure |mData| is only updated on the worker thread.
  // Note this must be done before MDSM's shutdown since dispatch could fail
  // when the worker thread is shut down.
  nsCOMPtr<nsIRunnable> r = new R(this, &DecodedStream::OnDataCreated, data);
  mOwnerThread->Dispatch(r.forget());
}
|
|
|
|
|
2015-07-24 05:28:17 -07:00
|
|
|
// True when at least one output (captured) stream is registered.
bool
DecodedStream::HasConsumers() const
{
  return !mOutputStreamManager.IsEmpty();
}
|
|
|
|
|
2015-08-24 06:05:22 -07:00
|
|
|
// Owner thread: receive ownership of the freshly-created stream data. If the
// session is still live, adopt it and start feeding; otherwise destroy it.
void
DecodedStream::OnDataCreated(UniquePtr<DecodedStreamData> aData)
{
  AssertOwnerThread();
  MOZ_ASSERT(!mData, "Already created.");

  // Start to send data to the stream immediately
  if (mStartTime.isSome()) {
    aData->SetPlaying(mPlaying);
    mData = Move(aData);
    SendData();
    return;
  }

  // Playback has ended. Destroy aData which is not needed anymore.
  DestroyData(Move(aData));
}
|
|
|
|
|
2015-05-25 19:21:53 -07:00
|
|
|
// Register a captured output stream; forwards to the manager.
void
DecodedStream::AddOutput(ProcessedMediaStream* aStream, bool aFinishWhenEnded)
{
  mOutputStreamManager.Add(aStream, aFinishWhenEnded);
}
|
|
|
|
|
2015-07-05 20:34:47 -07:00
|
|
|
// Unregister a captured output stream; forwards to the manager.
void
DecodedStream::RemoveOutput(MediaStream* aStream)
{
  mOutputStreamManager.Remove(aStream);
}
|
|
|
|
|
2015-06-08 01:51:39 -07:00
|
|
|
void
|
|
|
|
DecodedStream::SetPlaying(bool aPlaying)
|
|
|
|
{
|
2015-08-23 18:55:30 -07:00
|
|
|
AssertOwnerThread();
|
2015-07-05 20:36:26 -07:00
|
|
|
mPlaying = aPlaying;
|
|
|
|
if (mData) {
|
|
|
|
mData->SetPlaying(aPlaying);
|
|
|
|
}
|
2015-06-08 01:51:39 -07:00
|
|
|
}
|
|
|
|
|
2015-08-06 19:29:31 -07:00
|
|
|
// Store the volume to be applied when audio is written to the stream.
void
DecodedStream::SetVolume(double aVolume)
{
  AssertOwnerThread();
  mVolume = aVolume;
}
|
|
|
|
|
2015-08-12 18:22:59 -07:00
|
|
|
// Record whether the media is same-origin; cross-origin output is replaced
// with disabled (silent/black) data when sent to the stream.
void
DecodedStream::SetSameOrigin(bool aSameOrigin)
{
  AssertOwnerThread();
  mSameOrigin = aSameOrigin;
}
|
|
|
|
|
2015-07-05 20:36:15 -07:00
|
|
|
// Add the audio/video tracks (per mInfo) to the source stream, once per
// playback session. Track start times are initialized to mStartTime.
void
DecodedStream::InitTracks()
{
  AssertOwnerThread();

  if (mData->mStreamInitialized) {
    return;
  }

  SourceMediaStream* sourceStream = mData->mStream;

  if (mInfo.HasAudio()) {
    TrackID audioTrackId = mInfo.mAudio.mTrackId;
    AudioSegment* audio = new AudioSegment();
    sourceStream->AddAudioTrack(audioTrackId, mInfo.mAudio.mRate, 0, audio,
                                SourceMediaStream::ADDTRACK_QUEUED);
    mData->mNextAudioTime = mStartTime.ref();
  }

  if (mInfo.HasVideo()) {
    TrackID videoTrackId = mInfo.mVideo.mTrackId;
    VideoSegment* video = new VideoSegment();
    sourceStream->AddTrack(videoTrackId, 0, video,
                           SourceMediaStream::ADDTRACK_QUEUED);
    mData->mNextVideoTime = mStartTime.ref();
  }

  sourceStream->FinishAddTracks();
  mData->mStreamInitialized = true;
}
|
|
|
|
|
|
|
|
// Append one decoded audio packet (aData) to aOutput, inserting silence to
// fill any gap and skipping frames already written. Updates aStream's
// mAudioFramesWritten and mNextAudioTime.
static void
SendStreamAudio(DecodedStreamData* aStream, int64_t aStartTime,
                MediaData* aData, AudioSegment* aOutput,
                uint32_t aRate, double aVolume)
{
  MOZ_ASSERT(aData);
  AudioData* audio = aData->As<AudioData>();
  // This logic has to mimic AudioSink closely to make sure we write
  // the exact same silences
  CheckedInt64 audioWrittenOffset = aStream->mAudioFramesWritten +
    UsecsToFrames(aStartTime, aRate);
  CheckedInt64 frameOffset = UsecsToFrames(audio->mTime, aRate);

  if (!audioWrittenOffset.isValid() ||
      !frameOffset.isValid() ||
      // ignore packet that we've already processed
      frameOffset.value() + audio->mFrames <= audioWrittenOffset.value()) {
    return;
  }

  if (audioWrittenOffset.value() < frameOffset.value()) {
    int64_t silentFrames = frameOffset.value() - audioWrittenOffset.value();
    // Write silence to catch up
    AudioSegment silence;
    silence.InsertNullDataAtStart(silentFrames);
    aStream->mAudioFramesWritten += silentFrames;
    audioWrittenOffset += silentFrames;
    aOutput->AppendFrom(&silence);
  }

  MOZ_ASSERT(audioWrittenOffset.value() >= frameOffset.value());

  // Skip the initial |offset| frames of this packet that were already written.
  int64_t offset = audioWrittenOffset.value() - frameOffset.value();
  size_t framesToWrite = audio->mFrames - offset;

  audio->EnsureAudioBuffer();
  nsRefPtr<SharedBuffer> buffer = audio->mAudioBuffer;
  AudioDataValue* bufferData = static_cast<AudioDataValue*>(buffer->Data());
  nsAutoTArray<const AudioDataValue*, 2> channels;
  for (uint32_t i = 0; i < audio->mChannels; ++i) {
    channels.AppendElement(bufferData + i * audio->mFrames + offset);
  }
  aOutput->AppendFrames(buffer.forget(), channels, framesToWrite);
  aStream->mAudioFramesWritten += framesToWrite;
  aOutput->ApplyVolume(aVolume);

  aStream->mNextAudioTime = audio->GetEndTime();
}
|
|
|
|
|
|
|
|
// Drain all queued audio past mNextAudioTime into the source stream's audio
// track, applying aVolume. Ends the track when the queue is finished.
void
DecodedStream::SendAudio(double aVolume, bool aIsSameOrigin)
{
  AssertOwnerThread();

  if (!mInfo.HasAudio()) {
    return;
  }

  AudioSegment output;
  uint32_t rate = mInfo.mAudio.mRate;
  nsAutoTArray<nsRefPtr<MediaData>,10> audio;
  TrackID audioTrackId = mInfo.mAudio.mTrackId;
  SourceMediaStream* sourceStream = mData->mStream;

  // It's OK to hold references to the AudioData because AudioData
  // is ref-counted.
  mAudioQueue.GetElementsAfter(mData->mNextAudioTime, &audio);
  for (uint32_t i = 0; i < audio.Length(); ++i) {
    SendStreamAudio(mData.get(), mStartTime.ref(), audio[i], &output, rate, aVolume);
  }

  // Cross-origin media must not expose sample data to the captured stream.
  if (!aIsSameOrigin) {
    output.ReplaceWithDisabled();
  }

  // |mNextAudioTime| is updated as we process each audio sample in
  // SendStreamAudio(). This is consistent with how |mNextVideoTime|
  // is updated for video samples.
  if (output.GetDuration() > 0) {
    sourceStream->AppendToTrack(audioTrackId, &output);
  }

  if (mAudioQueue.IsFinished() && !mData->mHaveSentFinishAudio) {
    sourceStream->EndTrack(audioTrackId);
    mData->mHaveSentFinishAudio = true;
  }
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
WriteVideoToMediaStream(MediaStream* aStream,
|
|
|
|
layers::Image* aImage,
|
|
|
|
int64_t aEndMicroseconds,
|
|
|
|
int64_t aStartMicroseconds,
|
|
|
|
const mozilla::gfx::IntSize& aIntrinsicSize,
|
|
|
|
VideoSegment* aOutput)
|
|
|
|
{
|
|
|
|
nsRefPtr<layers::Image> image = aImage;
|
|
|
|
StreamTime duration =
|
|
|
|
aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
|
|
|
|
aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
|
|
|
|
aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
static bool
|
|
|
|
ZeroDurationAtLastChunk(VideoSegment& aInput)
|
|
|
|
{
|
|
|
|
// Get the last video frame's start time in VideoSegment aInput.
|
|
|
|
// If the start time is equal to the duration of aInput, means the last video
|
|
|
|
// frame's duration is zero.
|
|
|
|
StreamTime lastVideoStratTime;
|
|
|
|
aInput.GetLastFrame(&lastVideoStratTime);
|
|
|
|
return lastVideoStratTime == aInput.GetDuration();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Drain all queued video past mNextVideoTime into the source stream's video
// track, repeating the last image to fill gaps. Appends a one-tick
// compensation frame and ends the track when the queue is finished.
void
DecodedStream::SendVideo(bool aIsSameOrigin)
{
  AssertOwnerThread();

  if (!mInfo.HasVideo()) {
    return;
  }

  VideoSegment output;
  TrackID videoTrackId = mInfo.mVideo.mTrackId;
  nsAutoTArray<nsRefPtr<MediaData>, 10> video;
  SourceMediaStream* sourceStream = mData->mStream;

  // It's OK to hold references to the VideoData because VideoData
  // is ref-counted.
  mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);

  for (uint32_t i = 0; i < video.Length(); ++i) {
    VideoData* v = video[i]->As<VideoData>();

    if (mData->mNextVideoTime < v->mTime) {
      // Write last video frame to catch up. mLastVideoImage can be null here
      // which is fine, it just means there's no video.

      // TODO: |mLastVideoImage| should come from the last image rendered
      // by the state machine. This will avoid the black frame when capture
      // happens in the middle of playback (especially in th middle of a
      // video frame). E.g. if we have a video frame that is 30 sec long
      // and capture happens at 15 sec, we'll have to append a black frame
      // that is 15 sec long.
      WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage, v->mTime,
        mData->mNextVideoTime, mData->mLastVideoImageDisplaySize, &output);
      mData->mNextVideoTime = v->mTime;
    }

    if (mData->mNextVideoTime < v->GetEndTime()) {
      WriteVideoToMediaStream(sourceStream, v->mImage,
        v->GetEndTime(), mData->mNextVideoTime, v->mDisplay, &output);
      mData->mNextVideoTime = v->GetEndTime();
      mData->mLastVideoImage = v->mImage;
      mData->mLastVideoImageDisplaySize = v->mDisplay;
    }
  }

  // Check the output is not empty.
  if (output.GetLastFrame()) {
    mData->mEOSVideoCompensation = ZeroDurationAtLastChunk(output);
  }

  // Cross-origin media must not expose frame data to the captured stream.
  if (!aIsSameOrigin) {
    output.ReplaceWithDisabled();
  }

  if (output.GetDuration() > 0) {
    sourceStream->AppendToTrack(videoTrackId, &output);
  }

  if (mVideoQueue.IsFinished() && !mData->mHaveSentFinishVideo) {
    if (mData->mEOSVideoCompensation) {
      VideoSegment endSegment;
      // Calculate the deviation clock time from DecodedStream.
      int64_t deviation_usec = sourceStream->StreamTimeToMicroseconds(1);
      WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage,
        mData->mNextVideoTime + deviation_usec, mData->mNextVideoTime,
        mData->mLastVideoImageDisplaySize, &endSegment);
      mData->mNextVideoTime += deviation_usec;
      MOZ_ASSERT(endSegment.GetDuration() > 0);
      if (!aIsSameOrigin) {
        endSegment.ReplaceWithDisabled();
      }
      sourceStream->AppendToTrack(videoTrackId, &endSegment);
    }
    sourceStream->EndTrack(videoTrackId);
    mData->mHaveSentFinishVideo = true;
  }
}
|
|
|
|
|
|
|
|
void
|
2015-07-11 01:41:39 -07:00
|
|
|
DecodedStream::AdvanceTracks()
|
2015-07-05 20:36:15 -07:00
|
|
|
{
|
2015-08-23 18:55:30 -07:00
|
|
|
AssertOwnerThread();
|
2015-07-05 20:36:15 -07:00
|
|
|
|
|
|
|
StreamTime endPosition = 0;
|
|
|
|
|
2015-07-11 01:41:39 -07:00
|
|
|
if (mInfo.HasAudio()) {
|
2015-07-05 20:36:15 -07:00
|
|
|
StreamTime audioEnd = mData->mStream->TicksToTimeRoundDown(
|
2015-07-11 01:41:39 -07:00
|
|
|
mInfo.mAudio.mRate, mData->mAudioFramesWritten);
|
2015-07-05 20:36:15 -07:00
|
|
|
endPosition = std::max(endPosition, audioEnd);
|
|
|
|
}
|
|
|
|
|
2015-07-11 01:41:39 -07:00
|
|
|
if (mInfo.HasVideo()) {
|
2015-07-05 20:36:15 -07:00
|
|
|
StreamTime videoEnd = mData->mStream->MicrosecondsToStreamTimeRoundDown(
|
2015-07-11 01:41:39 -07:00
|
|
|
mData->mNextVideoTime - mStartTime.ref());
|
2015-07-05 20:36:15 -07:00
|
|
|
endPosition = std::max(endPosition, videoEnd);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!mData->mHaveSentFinish) {
|
|
|
|
mData->mStream->AdvanceKnownTracksTime(endPosition);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-08-16 16:52:28 -07:00
|
|
|
void
|
2015-08-12 18:22:59 -07:00
|
|
|
DecodedStream::SendData()
|
2015-07-05 20:36:15 -07:00
|
|
|
{
|
2015-08-23 18:55:30 -07:00
|
|
|
AssertOwnerThread();
|
2015-07-11 01:41:39 -07:00
|
|
|
MOZ_ASSERT(mStartTime.isSome(), "Must be called after StartPlayback()");
|
2015-07-05 20:36:15 -07:00
|
|
|
|
2015-08-19 22:02:43 -07:00
|
|
|
// Not yet created on the main thread. MDSM will try again later.
|
|
|
|
if (!mData) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2015-08-16 16:52:28 -07:00
|
|
|
// Nothing to do when the stream is finished.
|
|
|
|
if (mData->mHaveSentFinish) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2015-07-11 01:41:39 -07:00
|
|
|
InitTracks();
|
2015-08-12 18:22:59 -07:00
|
|
|
SendAudio(mVolume, mSameOrigin);
|
|
|
|
SendVideo(mSameOrigin);
|
2015-07-11 01:41:39 -07:00
|
|
|
AdvanceTracks();
|
2015-07-05 20:36:15 -07:00
|
|
|
|
2015-07-11 01:41:39 -07:00
|
|
|
bool finished = (!mInfo.HasAudio() || mAudioQueue.IsFinished()) &&
|
|
|
|
(!mInfo.HasVideo() || mVideoQueue.IsFinished());
|
2015-07-05 20:36:15 -07:00
|
|
|
|
|
|
|
if (finished && !mData->mHaveSentFinish) {
|
|
|
|
mData->mHaveSentFinish = true;
|
|
|
|
mData->mStream->Finish();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-07-16 19:18:04 -07:00
|
|
|
// Returns the media time (in microseconds) up to which audio has been
// written to the stream, or -1 when that can't be determined.
int64_t
DecodedStream::AudioEndTime() const
{
  AssertOwnerThread();
  if (!mStartTime.isSome() || !mInfo.HasAudio() || !mData) {
    return -1;
  }
  // Checked arithmetic: the sum can overflow for bogus rates/counts.
  const CheckedInt64 end = mStartTime.ref() +
    FramesToUsecs(mData->mAudioFramesWritten, mInfo.mAudio.mRate);
  return end.isValid() ? end.value() : -1;
}
|
|
|
|
|
2015-07-05 20:36:26 -07:00
|
|
|
// Current playback position in microseconds, relative to the media timeline.
int64_t
DecodedStream::GetPosition() const
{
  AssertOwnerThread();
  // This is only called after MDSM starts playback, so mStartTime is
  // guaranteed to hold a value.
  MOZ_ASSERT(mStartTime.isSome());
  int64_t offset = 0;
  if (mData) {
    offset = mData->GetPosition();
  }
  return mStartTime.ref() + offset;
}
|
|
|
|
|
|
|
|
bool
|
|
|
|
DecodedStream::IsFinished() const
|
|
|
|
{
|
2015-08-23 18:55:30 -07:00
|
|
|
AssertOwnerThread();
|
2015-08-19 22:02:43 -07:00
|
|
|
return mData && mData->IsFinished();
|
2015-07-05 20:36:26 -07:00
|
|
|
}
|
|
|
|
|
2015-08-23 19:04:21 -07:00
|
|
|
// Subscribe SendData() to both media queues so that every pushed sample and
// every end-of-queue notification triggers a forwarding pass on the owner
// thread.
void
DecodedStream::ConnectListener()
{
  AssertOwnerThread();

  mAudioPushListener = mAudioQueue.PushEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
  mAudioFinishListener = mAudioQueue.FinishEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
  mVideoPushListener = mVideoQueue.PushEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
  mVideoFinishListener = mVideoQueue.FinishEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
}
|
|
|
|
|
|
|
|
// Undo ConnectListener(): detach SendData() from all four media-queue
// events so no further notifications are delivered.
void
DecodedStream::DisconnectListener()
{
  AssertOwnerThread();

  mAudioPushListener.Disconnect();
  mVideoPushListener.Disconnect();
  mAudioFinishListener.Disconnect();
  mVideoFinishListener.Disconnect();
}
|
|
|
|
|
2015-05-09 20:38:15 -07:00
|
|
|
} // namespace mozilla
|