Backed out 3 changesets (bug 1059058) for mochitest-1 leaks.

Backed out changeset 3a343c27fc7a (bug 1059058)
Backed out changeset 8808324ba834 (bug 1059058)
Backed out changeset c8f0afffca59 (bug 1059058)
Ryan VanderMeulen 2014-09-03 13:55:55 -04:00
parent 0acb03a879
commit 5fc3d54717
11 changed files with 346 additions and 755 deletions

View File

@ -132,27 +132,6 @@ MediaSourceDecoder::CreateSubDecoder(const nsACString& aType)
   return mReader->CreateSubDecoder(aType);
 }
 
-void
-MediaSourceDecoder::AddTrackBuffer(TrackBuffer* aTrackBuffer)
-{
-  MOZ_ASSERT(mReader);
-  mReader->AddTrackBuffer(aTrackBuffer);
-}
-
-void
-MediaSourceDecoder::RemoveTrackBuffer(TrackBuffer* aTrackBuffer)
-{
-  MOZ_ASSERT(mReader);
-  mReader->RemoveTrackBuffer(aTrackBuffer);
-}
-
-void
-MediaSourceDecoder::OnTrackBufferConfigured(TrackBuffer* aTrackBuffer)
-{
-  MOZ_ASSERT(mReader);
-  mReader->OnTrackBufferConfigured(aTrackBuffer);
-}
-
 void
 MediaSourceDecoder::Ended()
 {

View File

@ -20,7 +20,6 @@ class MediaResource;
 class MediaDecoderStateMachine;
 class MediaSourceReader;
 class SourceBufferDecoder;
-class TrackBuffer;
 
 namespace dom {
@ -47,9 +46,6 @@ public:
   void DetachMediaSource();
 
   already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType);
-  void AddTrackBuffer(TrackBuffer* aTrackBuffer);
-  void RemoveTrackBuffer(TrackBuffer* aTrackBuffer);
-  void OnTrackBufferConfigured(TrackBuffer* aTrackBuffer);
 
   void Ended();

View File

@ -14,7 +14,6 @@
 #include "MediaSourceDecoder.h"
 #include "MediaSourceUtils.h"
 #include "SourceBufferDecoder.h"
-#include "TrackBuffer.h"
 
 #ifdef MOZ_FMP4
 #include "MP4Decoder.h"
@ -38,8 +37,6 @@ namespace mozilla {
 MediaSourceReader::MediaSourceReader(MediaSourceDecoder* aDecoder)
   : MediaDecoderReader(aDecoder)
-  , mLastAudioTime(-1)
-  , mLastVideoTime(-1)
   , mTimeThreshold(-1)
   , mDropAudioBeforeThreshold(false)
   , mDropVideoBeforeThreshold(false)
@ -52,35 +49,25 @@ MediaSourceReader::MediaSourceReader(MediaSourceDecoder* aDecoder)
 bool
 MediaSourceReader::IsWaitingMediaResources()
 {
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
-    if (!mTrackBuffers[i]->HasInitSegment()) {
-      return true;
-    }
-  }
-  return mTrackBuffers.IsEmpty();
+  return mDecoders.IsEmpty() && mPendingDecoders.IsEmpty();
 }
 
 void
 MediaSourceReader::RequestAudioData()
 {
-  MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData", this);
   if (!mAudioReader) {
     MSE_DEBUG("MediaSourceReader(%p)::RequestAudioData called with no audio reader", this);
+    MOZ_ASSERT(mPendingDecoders.IsEmpty());
     GetCallback()->OnDecodeError();
     return;
   }
-  if (SwitchAudioReader(double(mLastAudioTime) / USECS_PER_S)) {
-    MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData switching audio reader", this);
-  }
+  SwitchReaders(SWITCH_OPTIONAL);
   mAudioReader->RequestAudioData();
 }
 
 void
 MediaSourceReader::OnAudioDecoded(AudioData* aSample)
 {
-  MSE_DEBUGV("MediaSourceReader(%p)::OnAudioDecoded mTime=%lld mDuration=%lld d=%d",
-             this, aSample->mTime, aSample->mDuration, aSample->mDiscontinuity);
   if (mDropAudioBeforeThreshold) {
     if (aSample->mTime < mTimeThreshold) {
       MSE_DEBUG("MediaSourceReader(%p)::OnAudioDecoded mTime=%lld < mTimeThreshold=%lld",
@ -99,22 +86,21 @@ MediaSourceReader::OnAudioDecoded(AudioData* aSample)
     mAudioIsSeeking = false;
     aSample->mDiscontinuity = true;
   }
-  mLastAudioTime = aSample->mTime + aSample->mDuration;
   GetCallback()->OnAudioDecoded(aSample);
 }
 
 void
 MediaSourceReader::OnAudioEOS()
 {
-  MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p (decoders=%u)",
-            this, mAudioReader.get(), mAudioTrack->Decoders().Length());
-  if (SwitchAudioReader(double(mLastAudioTime) / USECS_PER_S)) {
+  MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p (readers=%u)",
+            this, mAudioReader.get(), mDecoders.Length());
+  if (SwitchReaders(SWITCH_FORCED)) {
     // Success! Resume decoding with next audio decoder.
     RequestAudioData();
   } else if (IsEnded()) {
     // End of stream.
-    MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p EOS (decoders=%u)",
-              this, mAudioReader.get(), mAudioTrack->Decoders().Length());
+    MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p EOS (readers=%u)",
+              this, mAudioReader.get(), mDecoders.Length());
     GetCallback()->OnAudioEOS();
   }
 }
@ -122,35 +108,26 @@ MediaSourceReader::OnAudioEOS()
 void
 MediaSourceReader::RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold)
 {
-  MSE_DEBUGV("MediaSourceReader(%p)::RequestVideoData(%d, %lld)",
-             this, aSkipToNextKeyframe, aTimeThreshold);
   if (!mVideoReader) {
     MSE_DEBUG("MediaSourceReader(%p)::RequestVideoData called with no video reader", this);
+    MOZ_ASSERT(mPendingDecoders.IsEmpty());
     GetCallback()->OnDecodeError();
     return;
   }
-  if (aSkipToNextKeyframe) {
-    mTimeThreshold = aTimeThreshold;
-    mDropAudioBeforeThreshold = true;
-    mDropVideoBeforeThreshold = true;
-  }
-  if (SwitchVideoReader(double(mLastVideoTime) / USECS_PER_S)) {
-    MSE_DEBUGV("MediaSourceReader(%p)::RequestVideoData switching video reader", this);
-  }
+  mTimeThreshold = aTimeThreshold;
+  SwitchReaders(SWITCH_OPTIONAL);
   mVideoReader->RequestVideoData(aSkipToNextKeyframe, aTimeThreshold);
 }
 
 void
 MediaSourceReader::OnVideoDecoded(VideoData* aSample)
 {
-  MSE_DEBUGV("MediaSourceReader(%p)::OnVideoDecoded mTime=%lld mDuration=%lld d=%d",
-             this, aSample->mTime, aSample->mDuration, aSample->mDiscontinuity);
   if (mDropVideoBeforeThreshold) {
     if (aSample->mTime < mTimeThreshold) {
       MSE_DEBUG("MediaSourceReader(%p)::OnVideoDecoded mTime=%lld < mTimeThreshold=%lld",
                 this, aSample->mTime, mTimeThreshold);
       delete aSample;
-      mVideoReader->RequestVideoData(false, 0);
+      mVideoReader->RequestVideoData(false, mTimeThreshold);
       return;
     }
     mDropVideoBeforeThreshold = false;
@ -163,7 +140,7 @@ MediaSourceReader::OnVideoDecoded(VideoData* aSample)
     mVideoIsSeeking = false;
     aSample->mDiscontinuity = true;
   }
-  mLastVideoTime = aSample->mTime + aSample->mDuration;
   GetCallback()->OnVideoDecoded(aSample);
 }
@ -171,15 +148,15 @@ void
 MediaSourceReader::OnVideoEOS()
 {
   // End of stream. See if we can switch to another video decoder.
-  MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p (decoders=%u)",
-            this, mVideoReader.get(), mVideoTrack->Decoders().Length());
-  if (SwitchVideoReader(double(mLastVideoTime) / USECS_PER_S)) {
+  MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p (readers=%u)",
+            this, mVideoReader.get(), mDecoders.Length());
+  if (SwitchReaders(SWITCH_FORCED)) {
     // Success! Resume decoding with next video decoder.
-    RequestVideoData(false, 0);
+    RequestVideoData(false, mTimeThreshold);
   } else if (IsEnded()) {
     // End of stream.
-    MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p EOS (decoders=%u)",
-              this, mVideoReader.get(), mVideoTrack->Decoders().Length());
+    MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p EOS (readers=%u)",
+              this, mVideoReader.get(), mDecoders.Length());
     GetCallback()->OnVideoEOS();
   }
 }
@ -187,7 +164,6 @@ MediaSourceReader::OnVideoEOS()
 void
 MediaSourceReader::OnDecodeError()
 {
-  MSE_DEBUG("MediaSourceReader(%p)::OnDecodeError", this);
   GetCallback()->OnDecodeError();
 }
@ -195,97 +171,171 @@ void
 MediaSourceReader::Shutdown()
 {
   MediaDecoderReader::Shutdown();
-  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
-    mTrackBuffers[i]->Shutdown();
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    mDecoders[i]->GetReader()->Shutdown();
   }
-  mTrackBuffers.Clear();
 }
 
 void
 MediaSourceReader::BreakCycles()
 {
   MediaDecoderReader::BreakCycles();
-  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
-    mTrackBuffers[i]->BreakCycles();
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    mDecoders[i]->GetReader()->BreakCycles();
   }
-  mTrackBuffers.Clear();
 }
 
 bool
-MediaSourceReader::SwitchAudioReader(double aTarget)
+MediaSourceReader::SwitchAudioReader(MediaDecoderReader* aTargetReader)
 {
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  // XXX: Can't handle adding an audio track after ReadMetadata yet.
-  if (!mAudioTrack) {
+  if (aTargetReader == mAudioReader) {
     return false;
   }
-  auto& decoders = mAudioTrack->Decoders();
-  for (uint32_t i = 0; i < decoders.Length(); ++i) {
-    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-    decoders[i]->GetBuffered(ranges);
-    MediaDecoderReader* newReader = decoders[i]->GetReader();
-    MSE_DEBUGV("MediaDecoderReader(%p)::SwitchAudioReader(%f) audioReader=%p reader=%p ranges=%s",
-               this, aTarget, mAudioReader.get(), newReader, DumpTimeRanges(ranges).get());
-    AudioInfo targetInfo = newReader->GetMediaInfo().mAudio;
+  if (mAudioReader) {
+    AudioInfo targetInfo = aTargetReader->GetMediaInfo().mAudio;
     AudioInfo currentInfo = mAudioReader->GetMediaInfo().mAudio;
     // TODO: We can't handle switching audio formats yet.
     if (currentInfo.mRate != targetInfo.mRate ||
         currentInfo.mChannels != targetInfo.mChannels) {
-      continue;
+      return false;
     }
-    if (ranges->Find(aTarget) != dom::TimeRanges::NoIndex) {
-      if (newReader->AudioQueue().AtEndOfStream()) {
-        continue;
-      }
-      if (mAudioReader) {
-        mAudioReader->SetIdle();
-      }
-      mAudioReader = newReader;
-      MSE_DEBUG("MediaDecoderReader(%p)::SwitchAudioReader(%f) switching to audio reader %p",
-                this, aTarget, mAudioReader.get());
-      return true;
-    }
+    mAudioReader->SetIdle();
   }
-  return false;
+  mAudioReader = aTargetReader;
+  mDropAudioBeforeThreshold = true;
+  MSE_DEBUG("MediaDecoderReader(%p)::SwitchReaders(%p) switching audio reader",
+            this, mAudioReader.get());
+  return true;
 }
 
 bool
-MediaSourceReader::SwitchVideoReader(double aTarget)
+MediaSourceReader::SwitchVideoReader(MediaDecoderReader* aTargetReader)
 {
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  // XXX: Can't handle adding a video track after ReadMetadata yet.
-  if (!mVideoTrack) {
+  if (aTargetReader == mVideoReader) {
     return false;
   }
-  auto& decoders = mVideoTrack->Decoders();
-  for (uint32_t i = 0; i < decoders.Length(); ++i) {
-    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-    decoders[i]->GetBuffered(ranges);
-    MediaDecoderReader* newReader = decoders[i]->GetReader();
-    MSE_DEBUGV("MediaDecoderReader(%p)::SwitchVideoReader(%f) videoReader=%p reader=%p ranges=%s",
-               this, aTarget, mVideoReader.get(), newReader, DumpTimeRanges(ranges).get());
-    if (ranges->Find(aTarget) != dom::TimeRanges::NoIndex) {
-      if (newReader->VideoQueue().AtEndOfStream()) {
-        continue;
-      }
-      if (mVideoReader) {
-        mVideoReader->SetIdle();
-      }
-      mVideoReader = newReader;
-      MSE_DEBUG("MediaDecoderReader(%p)::SwitchVideoReader(%f) switching to video reader %p",
-                this, aTarget, mVideoReader.get());
-      return true;
-    }
+  if (mVideoReader) {
+    mVideoReader->SetIdle();
   }
-  return false;
+  mVideoReader = aTargetReader;
+  mDropVideoBeforeThreshold = true;
+  MSE_DEBUG("MediaDecoderReader(%p)::SwitchVideoReader(%p) switching video reader",
+            this, mVideoReader.get());
+  return true;
+}
+
+bool
+MediaSourceReader::SwitchReaders(SwitchType aType)
+{
+  InitializePendingDecoders();
+
+  // This monitor must be held after the call to InitializePendingDecoders
+  // as that method also obtains the lock, and then attempts to exit it
+  // to call ReadMetadata on the readers. If we hold it before the call then
+  // it remains held during the ReadMetadata call causing a deadlock.
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+
+  bool didSwitch = false;
+  double decodeTarget = double(mTimeThreshold) / USECS_PER_S;
+
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    SourceBufferDecoder* decoder = mDecoders[i];
+    const MediaInfo& info = decoder->GetReader()->GetMediaInfo();
+
+    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
+    decoder->GetBuffered(ranges);
+
+    MSE_DEBUGV("MediaDecoderReader(%p)::SwitchReaders(%d) decoder=%u (%p) discarded=%d"
+               " reader=%p audioReader=%p videoReader=%p"
+               " hasAudio=%d hasVideo=%d decodeTarget=%f ranges=%s",
+               this, aType, i, decoder, decoder->IsDiscarded(),
+               decoder->GetReader(), mAudioReader.get(), mVideoReader.get(),
+               info.HasAudio(), info.HasVideo(), decodeTarget,
+               DumpTimeRanges(ranges).get());
+
+    if (decoder->IsDiscarded()) {
+      continue;
+    }
+
+    if (aType == SWITCH_FORCED || ranges->Find(decodeTarget) != dom::TimeRanges::NoIndex) {
+      if (info.HasAudio()) {
+        didSwitch |= SwitchAudioReader(mDecoders[i]->GetReader());
+      }
+      if (info.HasVideo()) {
+        didSwitch |= SwitchVideoReader(mDecoders[i]->GetReader());
+      }
+    }
+  }
+
+  return didSwitch;
+}
+
+class ReleaseDecodersTask : public nsRunnable {
+public:
+  explicit ReleaseDecodersTask(nsTArray<nsRefPtr<SourceBufferDecoder>>& aDecoders)
+  {
+    mDecoders.SwapElements(aDecoders);
+  }
+
+  NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL {
+    mDecoders.Clear();
+    return NS_OK;
+  }
+
+private:
+  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
+};
+
+void
+MediaSourceReader::InitializePendingDecoders()
+{
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  for (uint32_t i = 0; i < mPendingDecoders.Length(); ++i) {
+    nsRefPtr<SourceBufferDecoder> decoder = mPendingDecoders[i];
+    MediaDecoderReader* reader = decoder->GetReader();
+    MSE_DEBUG("MediaSourceReader(%p): Initializing subdecoder %p reader %p",
+              this, decoder.get(), reader);
+
+    MediaInfo mi;
+    nsAutoPtr<MetadataTags> tags; // TODO: Handle metadata.
+    nsresult rv;
+    {
+      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
+      rv = reader->ReadMetadata(&mi, getter_Transfers(tags));
+    }
+    reader->SetIdle();
+    if (NS_FAILED(rv)) {
+      // XXX: Need to signal error back to owning SourceBuffer.
+      MSE_DEBUG("MediaSourceReader(%p): Reader %p failed to initialize rv=%x", this, reader, rv);
+      continue;
+    }
+
+    bool active = false;
+    if (mi.HasVideo() || mi.HasAudio()) {
+      MSE_DEBUG("MediaSourceReader(%p): Reader %p has video=%d audio=%d",
+                this, reader, mi.HasVideo(), mi.HasAudio());
+      if (mi.HasVideo()) {
+        MSE_DEBUG("MediaSourceReader(%p): Reader %p video resolution=%dx%d",
+                  this, reader, mi.mVideo.mDisplay.width, mi.mVideo.mDisplay.height);
+      }
+      if (mi.HasAudio()) {
+        MSE_DEBUG("MediaSourceReader(%p): Reader %p audio sampleRate=%d channels=%d",
+                  this, reader, mi.mAudio.mRate, mi.mAudio.mChannels);
+      }
+      active = true;
+    }
+
+    if (active) {
+      mDecoders.AppendElement(decoder);
+    } else {
+      MSE_DEBUG("MediaSourceReader(%p): Reader %p not activated", this, reader);
+    }
+  }
+  NS_DispatchToMainThread(new ReleaseDecodersTask(mPendingDecoders));
+  MOZ_ASSERT(mPendingDecoders.IsEmpty());
+  mDecoder->NotifyWaitingForResourcesStatusChanged();
 }
 
 MediaDecoderReader*
@ -326,51 +376,22 @@ MediaSourceReader::CreateSubDecoder(const nsACString& aType)
   reader->SetCallback(callback);
   reader->SetTaskQueue(GetTaskQueue());
   reader->Init(nullptr);
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   MSE_DEBUG("MediaSourceReader(%p)::CreateSubDecoder subdecoder %p subreader %p",
             this, decoder.get(), reader.get());
   decoder->SetReader(reader);
+  mPendingDecoders.AppendElement(decoder);
+  RefPtr<nsIRunnable> task =
+    NS_NewRunnableMethod(this, &MediaSourceReader::InitializePendingDecoders);
+  if (NS_FAILED(GetTaskQueue()->Dispatch(task))) {
+    MSE_DEBUG("MediaSourceReader(%p): Failed to enqueue decoder initialization task", this);
+    return nullptr;
+  }
+  mDecoder->NotifyWaitingForResourcesStatusChanged();
   return decoder.forget();
 }
 
-void
-MediaSourceReader::AddTrackBuffer(TrackBuffer* aTrackBuffer)
-{
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  MSE_DEBUG("MediaSourceReader(%p)::AddTrackBuffer %p", this, aTrackBuffer);
-  mTrackBuffers.AppendElement(aTrackBuffer);
-}
-
-void
-MediaSourceReader::RemoveTrackBuffer(TrackBuffer* aTrackBuffer)
-{
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  MSE_DEBUG("MediaSourceReader(%p)::RemoveTrackBuffer %p", this, aTrackBuffer);
-  mTrackBuffers.RemoveElement(aTrackBuffer);
-  if (mAudioTrack == aTrackBuffer) {
-    mAudioTrack = nullptr;
-  }
-  if (mVideoTrack == aTrackBuffer) {
-    mVideoTrack = nullptr;
-  }
-}
-
-void
-MediaSourceReader::OnTrackBufferConfigured(TrackBuffer* aTrackBuffer)
-{
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  MOZ_ASSERT(mTrackBuffers.Contains(aTrackBuffer));
-  if (aTrackBuffer->HasAudio() && !mAudioTrack) {
-    MSE_DEBUG("MediaSourceReader(%p)::OnTrackBufferConfigured %p audio", this, aTrackBuffer);
-    mAudioTrack = aTrackBuffer;
-  }
-  if (aTrackBuffer->HasVideo() && !mVideoTrack) {
-    MSE_DEBUG("MediaSourceReader(%p)::OnTrackBufferConfigured %p video", this, aTrackBuffer);
-    mVideoTrack = aTrackBuffer;
-  }
-  mDecoder->NotifyWaitingForResourcesStatusChanged();
-}
-
+namespace {
+
 class ChangeToHaveMetadata : public nsRunnable {
 public:
   explicit ChangeToHaveMetadata(AbstractMediaDecoder* aDecoder) :
@ -389,18 +410,23 @@ public:
 private:
   nsRefPtr<AbstractMediaDecoder> mDecoder;
 };
+}
 
 bool
-MediaSourceReader::TrackBuffersContainTime(double aTime)
+MediaSourceReader::DecodersContainTime(double aTime)
 {
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  if (mAudioTrack && !mAudioTrack->ContainsTime(aTime)) {
-    return false;
-  }
-  if (mVideoTrack && !mVideoTrack->ContainsTime(aTime)) {
-    return false;
+  bool found = false;
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    if (!mDecoders[i]->IsDiscarded()) {
+      if (!mDecoders[i]->ContainsTime(aTime)) {
+        // No use to continue searching, one source buffer isn't ready yet
+        return false;
+      }
+      found = true;
+    }
   }
-  return true;
+  return found;
 }
 
 nsresult
@ -409,17 +435,8 @@ MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
 {
   MSE_DEBUG("MediaSourceReader(%p)::Seek(aTime=%lld, aStart=%lld, aEnd=%lld, aCurrent=%lld)",
             this, aTime, aStartTime, aEndTime, aCurrentTime);
-  ResetDecode();
-  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
-    mTrackBuffers[i]->ResetDecode();
-  }
-
-  // Decoding discontinuity upon seek, reset last times to seek target.
-  mLastAudioTime = aTime;
-  mLastVideoTime = aTime;
-
   double target = static_cast<double>(aTime) / USECS_PER_S;
-  if (!TrackBuffersContainTime(target)) {
+  if (!DecodersContainTime(target)) {
     MSE_DEBUG("MediaSourceReader(%p)::Seek no active buffer contains target=%f", this, target);
     NS_DispatchToMainThread(new ChangeToHaveMetadata(mDecoder));
   }
@ -427,30 +444,28 @@ MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
   // Loop until we have the requested time range in the source buffers.
   // This is a workaround for our lack of async functionality in the
   // MediaDecoderStateMachine. Bug 979104 implements what we need and
-  // we'll remove this for an async approach based on that in bug 1056441.
-  while (!TrackBuffersContainTime(target) && !IsShutdown() && !IsEnded()) {
+  // we'll remove this for an async approach based on that in bug XXXXXXX.
+  while (!DecodersContainTime(target) && !IsShutdown() && !IsEnded()) {
     MSE_DEBUG("MediaSourceReader(%p)::Seek waiting for target=%f", this, target);
     static_cast<MediaSourceDecoder*>(mDecoder)->WaitForData();
+    SwitchReaders(SWITCH_FORCED);
   }
   if (IsShutdown()) {
     return NS_ERROR_FAILURE;
   }
-  if (mAudioTrack) {
+  ResetDecode();
+  if (mAudioReader) {
     mAudioIsSeeking = true;
-    DebugOnly<bool> ok = SwitchAudioReader(target);
-    MOZ_ASSERT(ok && static_cast<SourceBufferDecoder*>(mAudioReader->GetDecoder())->ContainsTime(target));
     nsresult rv = mAudioReader->Seek(aTime, aStartTime, aEndTime, aCurrentTime);
     MSE_DEBUG("MediaSourceReader(%p)::Seek audio reader=%p rv=%x", this, mAudioReader.get(), rv);
     if (NS_FAILED(rv)) {
       return rv;
     }
   }
-  if (mVideoTrack) {
+  if (mVideoReader) {
     mVideoIsSeeking = true;
-    DebugOnly<bool> ok = SwitchVideoReader(target);
-    MOZ_ASSERT(ok && static_cast<SourceBufferDecoder*>(mVideoReader->GetDecoder())->ContainsTime(target));
     nsresult rv = mVideoReader->Seek(aTime, aStartTime, aEndTime, aCurrentTime);
     MSE_DEBUG("MediaSourceReader(%p)::Seek video reader=%p rv=%x", this, mVideoReader.get(), rv);
     if (NS_FAILED(rv)) {
@ -463,41 +478,39 @@ MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
 nsresult
 MediaSourceReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags)
 {
-  MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata tracks=%u", this, mTrackBuffers.Length());
-  // ReadMetadata is called *before* checking IsWaitingMediaResources.
-  if (IsWaitingMediaResources()) {
-    return NS_OK;
-  }
-  if (!mAudioTrack && !mVideoTrack) {
-    MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata missing track: mAudioTrack=%p mVideoTrack=%p",
-              this, mAudioTrack.get(), mVideoTrack.get());
-    return NS_ERROR_FAILURE;
-  }
+  InitializePendingDecoders();
+
+  MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata decoders=%u", this, mDecoders.Length());
+
+  // XXX: Make subdecoder setup async, so that use cases like bug 989888 can
+  // work. This will require teaching the state machine about dynamic track
+  // changes (and multiple tracks).
+  // Shorter term, make this block until we've got at least one video track
+  // and lie about having an audio track, then resample/remix as necessary
+  // to match any audio track added later to fit the format we lied about
+  // now. For now we just configure what we've got and cross our fingers.
   int64_t maxDuration = -1;
-
-  if (mAudioTrack) {
-    MOZ_ASSERT(mAudioTrack->HasInitSegment());
-    mAudioReader = mAudioTrack->Decoders()[0]->GetReader();
-    const MediaInfo& info = mAudioReader->GetMediaInfo();
-    MOZ_ASSERT(info.HasAudio());
-    mInfo.mAudio = info.mAudio;
-    maxDuration = std::max(maxDuration, mAudioReader->GetDecoder()->GetMediaDuration());
-    MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata audio reader=%p maxDuration=%lld",
-              this, mAudioReader.get(), maxDuration);
-  }
-  if (mVideoTrack) {
-    MOZ_ASSERT(mVideoTrack->HasInitSegment());
-    mVideoReader = mVideoTrack->Decoders()[0]->GetReader();
-    const MediaInfo& info = mVideoReader->GetMediaInfo();
-    MOZ_ASSERT(info.HasVideo());
-    mInfo.mVideo = info.mVideo;
-    maxDuration = std::max(maxDuration, mVideoReader->GetDecoder()->GetMediaDuration());
-    MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata video reader=%p maxDuration=%lld",
-              this, mVideoReader.get(), maxDuration);
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    MediaDecoderReader* reader = mDecoders[i]->GetReader();
+    MediaInfo mi = reader->GetMediaInfo();
+
+    if (mi.HasVideo() && !mInfo.HasVideo()) {
+      MOZ_ASSERT(!mVideoReader);
+      mVideoReader = reader;
+      mInfo.mVideo = mi.mVideo;
+      maxDuration = std::max(maxDuration, mDecoders[i]->GetMediaDuration());
+      MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata video reader=%p maxDuration=%lld",
+                this, reader, maxDuration);
+    }
+    if (mi.HasAudio() && !mInfo.HasAudio()) {
+      MOZ_ASSERT(!mAudioReader);
+      mAudioReader = reader;
+      mInfo.mAudio = mi.mAudio;
+      maxDuration = std::max(maxDuration, mDecoders[i]->GetMediaDuration());
+      MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata audio reader=%p maxDuration=%lld",
+                this, reader, maxDuration);
+    }
   }
 
   if (maxDuration != -1) {
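
Note: the restored reader-switching logic above reduces to a simple rule: an optional switch (SWITCH_OPTIONAL) only picks a decoder whose buffered range contains the current decode target, while a forced switch (SWITCH_FORCED, used at end-of-stream and while Seek waits for data) accepts any non-discarded decoder. The following self-contained C++ sketch illustrates that rule with hypothetical names (FakeDecoder, PickDecoder); it is not the Gecko API in this patch.

// Hypothetical, simplified model of the SwitchReaders decision rule.
#include <cstdio>
#include <vector>

struct FakeDecoder {
  bool discarded;       // corresponds to SourceBufferDecoder::IsDiscarded()
  double bufferedStart; // seconds
  double bufferedEnd;   // seconds
};

// Optional switch: only a decoder whose buffered range contains aTarget.
// Forced switch: the first non-discarded decoder, regardless of ranges.
int PickDecoder(const std::vector<FakeDecoder>& aDecoders, double aTarget, bool aForced)
{
  for (size_t i = 0; i < aDecoders.size(); ++i) {
    const FakeDecoder& d = aDecoders[i];
    if (d.discarded) {
      continue;
    }
    if (aForced || (aTarget >= d.bufferedStart && aTarget < d.bufferedEnd)) {
      return static_cast<int>(i);
    }
  }
  return -1; // no suitable decoder: caller reports EOS or keeps waiting
}

int main()
{
  std::vector<FakeDecoder> decoders = {
    { false, 0.0, 10.0 },  // decoder for the first appended segment
    { false, 10.0, 20.0 }, // decoder created after a timestamp discontinuity
  };
  std::printf("target 12s -> decoder %d\n", PickDecoder(decoders, 12.0, false)); // 1
  std::printf("target 25s -> decoder %d\n", PickDecoder(decoders, 25.0, false)); // -1
  std::printf("target 25s, forced -> decoder %d\n", PickDecoder(decoders, 25.0, true)); // 0
  return 0;
}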

View File

@ -19,7 +19,6 @@ namespace mozilla {
 class MediaSourceDecoder;
 class SourceBufferDecoder;
-class TrackBuffer;
 
 namespace dom {
@ -71,25 +70,22 @@ public:
   nsresult ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) MOZ_OVERRIDE;
   nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
                 int64_t aCurrentTime) MOZ_OVERRIDE;
 
   already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType);
 
-  void AddTrackBuffer(TrackBuffer* aTrackBuffer);
-  void RemoveTrackBuffer(TrackBuffer* aTrackBuffer);
-  void OnTrackBufferConfigured(TrackBuffer* aTrackBuffer);
-
   void Shutdown();
 
   virtual void BreakCycles();
 
+  void InitializePendingDecoders();
+
   bool IsShutdown()
   {
     ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
     return mDecoder->IsShutdown();
   }
 
-  // Return true if all of the active tracks contain data for the specified time.
-  bool TrackBuffersContainTime(double aTime);
+  // Return true if any of the active decoders contain data for the given time
+  bool DecodersContainTime(double aTime);
 
   // Mark the reader to indicate that EndOfStream has been called on our MediaSource
   void Ended();
@ -98,24 +94,27 @@ public:
   bool IsEnded();
 
 private:
-  bool SwitchAudioReader(double aTarget);
-  bool SwitchVideoReader(double aTarget);
+  enum SwitchType {
+    SWITCH_OPTIONAL,
+    SWITCH_FORCED
+  };
 
-  nsRefPtr<MediaDecoderReader> mAudioReader;
-  nsRefPtr<MediaDecoderReader> mVideoReader;
+  bool SwitchReaders(SwitchType aType);
 
-  nsTArray<nsRefPtr<TrackBuffer>> mTrackBuffers;
-  nsRefPtr<TrackBuffer> mAudioTrack;
-  nsRefPtr<TrackBuffer> mVideoTrack;
+  bool SwitchAudioReader(MediaDecoderReader* aTargetReader);
+  bool SwitchVideoReader(MediaDecoderReader* aTargetReader);
 
   // These are read and written on the decode task queue threads.
-  int64_t mLastAudioTime;
-  int64_t mLastVideoTime;
   int64_t mTimeThreshold;
   bool mDropAudioBeforeThreshold;
   bool mDropVideoBeforeThreshold;
 
+  nsTArray<nsRefPtr<SourceBufferDecoder>> mPendingDecoders;
+  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
+
+  nsRefPtr<MediaDecoderReader> mAudioReader;
+  nsRefPtr<MediaDecoderReader> mVideoReader;
+
   bool mEnded;
 
   // For a seek to complete we need to send a sample with

View File

@ -6,14 +6,14 @@
 #include "SourceBuffer.h"
 
 #include "AsyncEventRunner.h"
+#include "DecoderTraits.h"
+#include "MediaDecoder.h"
+#include "MediaSourceDecoder.h"
 #include "MediaSourceUtils.h"
-#include "TrackBuffer.h"
-#include "VideoUtils.h"
-#include "WebMBufferedParser.h"
+#include "SourceBufferResource.h"
 #include "mozilla/Endian.h"
 #include "mozilla/ErrorResult.h"
 #include "mozilla/FloatingPoint.h"
-#include "mozilla/Preferences.h"
 #include "mozilla/dom/MediaSourceBinding.h"
 #include "mozilla/dom/TimeRanges.h"
 #include "mp4_demuxer/BufferStream.h"
@ -23,6 +23,10 @@
 #include "nsIRunnable.h"
 #include "nsThreadUtils.h"
 #include "prlog.h"
+#include "SourceBufferDecoder.h"
+#include "mozilla/Preferences.h"
+#include "WebMBufferedParser.h"
 
 struct JSContext;
 class JSObject;
@ -331,8 +335,19 @@ SourceBuffer::GetBuffered(ErrorResult& aRv)
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
   }
+  double highestEndTime = 0;
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  double highestEndTime = mTrackBuffer->Buffered(ranges);
+  // TODO: Need to adjust mDecoders so it only tracks active decoders.
+  // Once we have an abstraction for track buffers, this needs to report the
+  // intersection of buffered ranges within those track buffers.
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    nsRefPtr<TimeRanges> r = new TimeRanges();
+    mDecoders[i]->GetBuffered(r);
+    if (r->Length() > 0) {
+      highestEndTime = std::max(highestEndTime, r->GetEndTime());
+      ranges->Union(r);
+    }
+  }
   if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
     // Set the end time on the last range to highestEndTime by adding a
     // new range spanning the current end time to highestEndTime, which
@ -417,7 +432,7 @@ SourceBuffer::Abort(ErrorResult& aRv)
   mAppendWindowEnd = PositiveInfinity<double>();
 
   MSE_DEBUG("SourceBuffer(%p)::Abort() Discarding decoder", this);
-  mTrackBuffer->DiscardDecoder();
+  DiscardDecoder();
 }
 
 void
@ -449,10 +464,8 @@ SourceBuffer::Detach()
 {
   MOZ_ASSERT(NS_IsMainThread());
   MSE_DEBUG("SourceBuffer(%p)::Detach", this);
-  if (mTrackBuffer) {
-    mTrackBuffer->Detach();
-  }
-  mTrackBuffer = nullptr;
+  Ended();
+  DiscardDecoder();
   mMediaSource = nullptr;
 }
@ -460,34 +473,36 @@ void
 SourceBuffer::Ended()
 {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(IsAttached());
   MSE_DEBUG("SourceBuffer(%p)::Ended", this);
-  mTrackBuffer->DiscardDecoder();
+  if (mDecoder) {
+    mDecoder->GetResource()->Ended();
+  }
 }
 
 SourceBuffer::SourceBuffer(MediaSource* aMediaSource, const nsACString& aType)
   : DOMEventTargetHelper(aMediaSource->GetParentObject())
   , mMediaSource(aMediaSource)
   , mType(aType)
+  , mLastParsedTimestamp(UnspecifiedNaN<double>())
   , mAppendWindowStart(0)
   , mAppendWindowEnd(PositiveInfinity<double>())
   , mTimestampOffset(0)
   , mAppendMode(SourceBufferAppendMode::Segments)
   , mUpdating(false)
+  , mDecoderInitialized(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(aMediaSource);
   mParser = ContainerParser::CreateForMIMEType(aType);
-  mTrackBuffer = new TrackBuffer(aMediaSource->GetDecoder(), aType);
-  MSE_DEBUG("SourceBuffer(%p)::SourceBuffer: Create mParser=%p mTrackBuffer=%p",
-            this, mParser.get(), mTrackBuffer.get());
+  MSE_DEBUG("SourceBuffer(%p)::SourceBuffer: Creating initial decoder, mParser=%p", this, mParser.get());
+  InitNewDecoder();
 }
 
 SourceBuffer::~SourceBuffer()
 {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(!mMediaSource);
   MSE_DEBUG("SourceBuffer(%p)::~SourceBuffer", this);
+  DiscardDecoder();
 }
 
 MediaSource*
@ -518,6 +533,37 @@ SourceBuffer::QueueAsyncSimpleEvent(const char* aName)
   NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
 }
 
+bool
+SourceBuffer::InitNewDecoder()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  MSE_DEBUG("SourceBuffer(%p)::InitNewDecoder", this);
+  MOZ_ASSERT(!mDecoder);
+  MediaSourceDecoder* parentDecoder = mMediaSource->GetDecoder();
+  nsRefPtr<SourceBufferDecoder> decoder = parentDecoder->CreateSubDecoder(mType);
+  if (!decoder) {
+    return false;
+  }
+  mDecoder = decoder;
+  mDecoderInitialized = false;
+  mDecoders.AppendElement(mDecoder);
+  return true;
+}
+
+void
+SourceBuffer::DiscardDecoder()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  MSE_DEBUG("SourceBuffer(%p)::DiscardDecoder mDecoder=%p", this, mDecoder.get());
+  if (mDecoder) {
+    mDecoder->SetDiscarded();
+  }
+  mDecoder = nullptr;
+  mDecoderInitialized = false;
+  // XXX: Parser reset may be required?
+  mLastParsedTimestamp = UnspecifiedNaN<double>();
+}
+
 void
 SourceBuffer::StartUpdating()
 {
@ -564,13 +610,20 @@ SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aR
   // TODO: Run buffer append algorithm asynchronously (would call StopUpdating()).
   if (mParser->IsInitSegmentPresent(aData, aLength)) {
     MSE_DEBUG("SourceBuffer(%p)::AppendData: New initialization segment.", this);
-    mTrackBuffer->DiscardDecoder();
-    if (!mTrackBuffer->NewDecoder()) {
+    if (mDecoderInitialized) {
+      // Existing decoder has been used, time for a new one.
+      DiscardDecoder();
+    }
+    // If we've got a decoder here, it's not initialized, so we can use it
+    // rather than creating a new one.
+    if (!mDecoder && !InitNewDecoder()) {
       aRv.Throw(NS_ERROR_FAILURE); // XXX: Review error handling.
       return;
     }
     MSE_DEBUG("SourceBuffer(%p)::AppendData: Decoder marked as initialized.", this);
-  } else if (!mTrackBuffer->HasInitSegment()) {
+    mDecoderInitialized = true;
+  } else if (!mDecoderInitialized) {
     MSE_DEBUG("SourceBuffer(%p)::AppendData: Non-init segment appended during initialization.");
     Optional<MediaSourceEndOfStreamError> decodeError(MediaSourceEndOfStreamError::Decode);
     ErrorResult dummy;
@ -580,33 +633,37 @@ SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aR
   }
 
   double start, end;
   if (mParser->ParseStartAndEndTimestamps(aData, aLength, start, end)) {
-    double lastStart, lastEnd;
-    mTrackBuffer->LastTimestamp(lastStart, lastEnd);
     if (mParser->IsMediaSegmentPresent(aData, aLength) &&
-        (start < lastEnd || start - lastEnd > 0.1)) {
-      MSE_DEBUG("SourceBuffer(%p)::AppendData: Data last=[%f, %f] overlaps [%f, %f]",
-                this, lastStart, lastEnd, start, end);
+        (start < mLastParsedTimestamp || start - mLastParsedTimestamp > 0.1)) {
+      MSE_DEBUG("SourceBuffer(%p)::AppendData: Data (%f, %f) overlaps %f.",
+                this, start, end, mLastParsedTimestamp);
       // This data is earlier in the timeline than data we have already
       // processed, so we must create a new decoder to handle the decoding.
-      mTrackBuffer->DiscardDecoder();
+      DiscardDecoder();
       // If we've got a decoder here, it's not initialized, so we can use it
       // rather than creating a new one.
-      if (!mTrackBuffer->NewDecoder()) {
+      if (!InitNewDecoder()) {
         aRv.Throw(NS_ERROR_FAILURE); // XXX: Review error handling.
         return;
       }
       MSE_DEBUG("SourceBuffer(%p)::AppendData: Decoder marked as initialized.", this);
+      mDecoderInitialized = true;
       const nsTArray<uint8_t>& initData = mParser->InitData();
-      mTrackBuffer->AppendData(initData.Elements(), initData.Length());
-      mTrackBuffer->SetLastStartTimestamp(start);
+      mDecoder->NotifyDataArrived(reinterpret_cast<const char*>(initData.Elements()),
+                                  initData.Length(),
+                                  0);
+      mDecoder->GetResource()->AppendData(initData.Elements(), initData.Length());
     }
-    mTrackBuffer->SetLastEndTimestamp(end);
-    MSE_DEBUG("SourceBuffer(%p)::AppendData: Segment last=[%f, %f] [%f, %f]",
-              this, lastStart, lastEnd, start, end);
+    mLastParsedTimestamp = end;
+    MSE_DEBUG("SourceBuffer(%p)::AppendData: Segment start=%f end=%f", this, start, end);
   }
-  mTrackBuffer->AppendData(aData, aLength);
+  // XXX: For future reference: NDA call must run on the main thread.
+  mDecoder->NotifyDataArrived(reinterpret_cast<const char*>(aData),
+                              aLength,
+                              mDecoder->GetResource()->GetLength());
+  mDecoder->GetResource()->AppendData(aData, aLength);
 
   // Eviction uses a byte threshold. If the buffer is greater than the
   // number of bytes then data is evicted. The time range for this
@ -616,7 +673,7 @@ SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aR
   // TODO: Make the eviction threshold smaller for audio-only streams.
   // TODO: Drive evictions off memory pressure notifications.
   const uint32_t evict_threshold = 75 * (1 << 20);
-  bool evicted = mTrackBuffer->EvictData(evict_threshold);
+  bool evicted = mDecoder->GetResource()->EvictData(evict_threshold);
   if (evicted) {
     MSE_DEBUG("SourceBuffer(%p)::AppendData Evict; current buffered start=%f",
               this, GetBufferedStart());
@ -657,13 +714,20 @@ SourceBuffer::Evict(double aStart, double aEnd)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MSE_DEBUG("SourceBuffer(%p)::Evict(aStart=%f, aEnd=%f)", this, aStart, aEnd);
+  if (!mDecoder) {
+    return;
+  }
   double currentTime = mMediaSource->GetDecoder()->GetCurrentTime();
   double evictTime = aEnd;
   const double safety_threshold = 5;
   if (currentTime + safety_threshold >= evictTime) {
     evictTime -= safety_threshold;
   }
-  mTrackBuffer->EvictBefore(evictTime);
+  int64_t endOffset = mDecoder->ConvertToByteOffset(evictTime);
+  if (endOffset > 0) {
+    mDecoder->GetResource()->EvictBefore(endOffset);
+  }
+  MSE_DEBUG("SourceBuffer(%p)::Evict offset=%lld", this, endOffset);
 }
 
 NS_IMPL_CYCLE_COLLECTION_INHERITED(SourceBuffer, DOMEventTargetHelper,
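
Note: SourceBuffer::GetBuffered above reports the union of every decoder's buffered ranges plus the highest end time, and AppendData evicts once buffered bytes exceed 75 * (1 << 20). A minimal sketch of both calculations follows, using a hypothetical Range type rather than dom::TimeRanges and assuming the union should be normalized; it only illustrates the arithmetic, not the Gecko classes.

// Hypothetical sketch of the buffered-range union and the eviction threshold.
#include <algorithm>
#include <cstdio>
#include <vector>

struct Range { double start, end; };

// Union of per-decoder buffered ranges; returns the highest end time seen.
static double UnionRanges(const std::vector<std::vector<Range>>& aPerDecoder,
                          std::vector<Range>& aOut)
{
  double highestEnd = 0;
  for (const auto& ranges : aPerDecoder) {
    for (const Range& r : ranges) {
      aOut.push_back(r);
      highestEnd = std::max(highestEnd, r.end);
    }
  }
  // Normalize: sort by start and merge overlapping ranges.
  std::sort(aOut.begin(), aOut.end(),
            [](const Range& a, const Range& b) { return a.start < b.start; });
  std::vector<Range> merged;
  for (const Range& r : aOut) {
    if (!merged.empty() && r.start <= merged.back().end) {
      merged.back().end = std::max(merged.back().end, r.end);
    } else {
      merged.push_back(r);
    }
  }
  aOut.swap(merged);
  return highestEnd;
}

int main()
{
  std::vector<std::vector<Range>> perDecoder = {
    { { 0.0, 4.0 } }, // decoder created for the first init segment
    { { 3.5, 9.0 } }, // decoder created after an overlapping append
  };
  std::vector<Range> out;
  double highestEnd = UnionRanges(perDecoder, out);
  for (const Range& r : out) {
    std::printf("[%.1f, %.1f) ", r.start, r.end);
  }
  std::printf("highestEnd=%.1f\n", highestEnd); // [0.0, 9.0) highestEnd=9.0

  // The eviction threshold in AppendData is a plain byte count:
  const unsigned evict_threshold = 75 * (1 << 20); // 75 MiB = 78643200 bytes
  std::printf("evict_threshold=%u bytes\n", evict_threshold);
  return 0;
}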

View File

@ -7,13 +7,14 @@
 #ifndef mozilla_dom_SourceBuffer_h_
 #define mozilla_dom_SourceBuffer_h_
 
+#include "MediaDecoderReader.h"
 #include "MediaSource.h"
 #include "js/RootingAPI.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/Attributes.h"
-#include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/dom/SourceBufferBinding.h"
 #include "mozilla/dom/TypedArray.h"
+#include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/mozalloc.h"
 #include "nsAutoPtr.h"
 #include "nsCOMPtr.h"
@ -30,7 +31,8 @@ namespace mozilla {
 class ContainerParser;
 class ErrorResult;
-class TrackBuffer;
+class SourceBufferResource;
+class SourceBufferDecoder;
 template <typename T> class AsyncEventRunner;
 
 namespace dom {
@ -137,7 +139,10 @@ private:
   nsAutoPtr<ContainerParser> mParser;
 
-  nsRefPtr<TrackBuffer> mTrackBuffer;
+  double mLastParsedTimestamp;
+
+  nsRefPtr<SourceBufferDecoder> mDecoder;
+  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
 
   double mAppendWindowStart;
   double mAppendWindowEnd;
@ -146,6 +151,8 @@ private:
   SourceBufferAppendMode mAppendMode;
   bool mUpdating;
 
+  bool mDecoderInitialized;
 };
 
 } // namespace dom

View File

@ -38,6 +38,7 @@ SourceBufferDecoder::SourceBufferDecoder(MediaResource* aResource,
   , mParentDecoder(aParentDecoder)
   , mReader(nullptr)
   , mMediaDuration(-1)
+  , mDiscarded(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_COUNT_CTOR(SourceBufferDecoder);
@ -146,10 +147,6 @@ SourceBufferDecoder::OnStateMachineThread() const
 bool
 SourceBufferDecoder::OnDecodeThread() const
 {
-  // During initialization we run on our TrackBuffer's task queue.
-  if (mTaskQueue) {
-    return mTaskQueue->IsCurrentThreadIn();
-  }
   return mParentDecoder->OnDecodeThread();
 }

View File

@ -8,10 +8,9 @@
 #define MOZILLA_SOURCEBUFFERDECODER_H_
 
 #include "AbstractMediaDecoder.h"
-#include "MediaDecoderReader.h"
-#include "SourceBufferResource.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/ReentrantMonitor.h"
+#include "SourceBufferResource.h"
 
 namespace mozilla {
@ -75,30 +74,33 @@ public:
     return mReader;
   }
 
-  void SetTaskQueue(MediaTaskQueue* aTaskQueue)
-  {
-    MOZ_ASSERT((!mTaskQueue && aTaskQueue) || (mTaskQueue && !aTaskQueue));
-    mTaskQueue = aTaskQueue;
-  }
-
   // Given a time convert it into an approximate byte offset from the
   // cached data. Returns -1 if no such value is computable.
   int64_t ConvertToByteOffset(double aTime);
 
+  bool IsDiscarded()
+  {
+    return mDiscarded;
+  }
+
+  void SetDiscarded()
+  {
+    GetResource()->Ended();
+    mDiscarded = true;
+  }
+
   // Returns true if the data buffered by this decoder contains the given time.
   bool ContainsTime(double aTime);
 
 private:
   virtual ~SourceBufferDecoder();
 
-  // Our TrackBuffer's task queue, this is only non-null during initialization.
-  RefPtr<MediaTaskQueue> mTaskQueue;
   nsRefPtr<MediaResource> mResource;
   AbstractMediaDecoder* mParentDecoder;
   nsRefPtr<MediaDecoderReader> mReader;
   int64_t mMediaDuration;
+  bool mDiscarded;
 };
 
 } // namespace mozilla

View File

@ -1,336 +0,0 @@
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "TrackBuffer.h"
#include "MediaSourceDecoder.h"
#include "SharedThreadPool.h"
#include "MediaTaskQueue.h"
#include "SourceBufferDecoder.h"
#include "SourceBufferResource.h"
#include "VideoUtils.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/dom/MediaSourceBinding.h"
#include "mozilla/dom/TimeRanges.h"
#include "nsError.h"
#include "nsIRunnable.h"
#include "nsThreadUtils.h"
#include "prlog.h"
struct JSContext;
class JSObject;
#ifdef PR_LOGGING
extern PRLogModuleInfo* GetMediaSourceLog();
extern PRLogModuleInfo* GetMediaSourceAPILog();
#define MSE_DEBUG(...) PR_LOG(GetMediaSourceLog(), PR_LOG_DEBUG, (__VA_ARGS__))
#define MSE_DEBUGV(...) PR_LOG(GetMediaSourceLog(), PR_LOG_DEBUG+1, (__VA_ARGS__))
#define MSE_API(...) PR_LOG(GetMediaSourceAPILog(), PR_LOG_DEBUG, (__VA_ARGS__))
#else
#define MSE_DEBUG(...)
#define MSE_DEBUGV(...)
#define MSE_API(...)
#endif
namespace mozilla {
TrackBuffer::TrackBuffer(MediaSourceDecoder* aParentDecoder, const nsACString& aType)
: mParentDecoder(aParentDecoder)
, mType(aType)
, mLastStartTimestamp(0)
, mLastEndTimestamp(UnspecifiedNaN<double>())
, mHasAudio(false)
, mHasVideo(false)
{
MOZ_COUNT_CTOR(TrackBuffer);
mTaskQueue = new MediaTaskQueue(GetMediaDecodeThreadPool());
aParentDecoder->AddTrackBuffer(this);
}
TrackBuffer::~TrackBuffer()
{
MOZ_COUNT_DTOR(TrackBuffer);
}
class ReleaseDecoderTask : public nsRunnable {
public:
explicit ReleaseDecoderTask(nsRefPtr<SourceBufferDecoder> aDecoder)
{
mDecoders.AppendElement(aDecoder);
}
explicit ReleaseDecoderTask(nsTArray<nsRefPtr<SourceBufferDecoder>>& aDecoders)
{
mDecoders.SwapElements(aDecoders);
}
NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL {
mDecoders.Clear();
return NS_OK;
}
private:
nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
};
void
TrackBuffer::Shutdown()
{
// Shutdown waits for any pending events, which may require the monitor,
// so we must not hold the monitor during this call.
mTaskQueue->Shutdown();
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
DiscardDecoder();
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->Shutdown();
}
NS_DispatchToMainThread(new ReleaseDecoderTask(mDecoders));
MOZ_ASSERT(mDecoders.IsEmpty());
}
void
TrackBuffer::AppendData(const uint8_t* aData, uint32_t aLength)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mCurrentDecoder);
SourceBufferResource* resource = mCurrentDecoder->GetResource();
// XXX: For future reference: NDA call must run on the main thread.
mCurrentDecoder->NotifyDataArrived(reinterpret_cast<const char*>(aData),
aLength, resource->GetLength());
resource->AppendData(aData, aLength);
}
bool
TrackBuffer::EvictData(uint32_t aThreshold)
{
MOZ_ASSERT(NS_IsMainThread());
// XXX Call EvictData on mDecoders?
return mCurrentDecoder->GetResource()->EvictData(aThreshold);
}
void
TrackBuffer::EvictBefore(double aTime)
{
MOZ_ASSERT(NS_IsMainThread());
// XXX Call EvictBefore on mDecoders?
int64_t endOffset = mCurrentDecoder->ConvertToByteOffset(aTime);
if (endOffset > 0) {
mCurrentDecoder->GetResource()->EvictBefore(endOffset);
}
MSE_DEBUG("TrackBuffer(%p)::EvictBefore offset=%lld", this, endOffset);
}
double
TrackBuffer::Buffered(dom::TimeRanges* aRanges)
{
MOZ_ASSERT(NS_IsMainThread());
// XXX check default if mDecoders empty?
double highestEndTime = 0;
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
mDecoders[i]->GetBuffered(r);
if (r->Length() > 0) {
highestEndTime = std::max(highestEndTime, r->GetEndTime());
aRanges->Union(r);
}
}
return highestEndTime;
}
bool
TrackBuffer::NewDecoder()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mCurrentDecoder && mParentDecoder);
nsRefPtr<SourceBufferDecoder> decoder = mParentDecoder->CreateSubDecoder(mType);
if (!decoder) {
return false;
}
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
mCurrentDecoder = decoder;
mLastStartTimestamp = 0;
mLastEndTimestamp = UnspecifiedNaN<double>();
return QueueInitializeDecoder(decoder);
}
bool
TrackBuffer::QueueInitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
{
RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<nsRefPtr<SourceBufferDecoder>>(this,
&TrackBuffer::InitializeDecoder,
aDecoder);
aDecoder->SetTaskQueue(mTaskQueue);
if (NS_FAILED(mTaskQueue->Dispatch(task))) {
MSE_DEBUG("MediaSourceReader(%p): Failed to enqueue decoder initialization task", this);
return false;
}
return true;
}
void
TrackBuffer::InitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
{
// ReadMetadata may block the thread waiting on data, so it must not be
// called with the monitor held.
mParentDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
MediaDecoderReader* reader = aDecoder->GetReader();
MSE_DEBUG("TrackBuffer(%p): Initializing subdecoder %p reader %p",
this, aDecoder.get(), reader);
MediaInfo mi;
nsAutoPtr<MetadataTags> tags; // TODO: Handle metadata.
nsresult rv = reader->ReadMetadata(&mi, getter_Transfers(tags));
reader->SetIdle();
if (NS_FAILED(rv) || (!mi.HasVideo() && !mi.HasAudio())) {
// XXX: Need to signal error back to owning SourceBuffer.
MSE_DEBUG("TrackBuffer(%p): Reader %p failed to initialize rv=%x audio=%d video=%d",
this, reader, rv, mi.HasAudio(), mi.HasVideo());
aDecoder->SetTaskQueue(nullptr);
NS_DispatchToMainThread(new ReleaseDecoderTask(aDecoder));
return;
}
if (mi.HasVideo()) {
MSE_DEBUG("TrackBuffer(%p): Reader %p video resolution=%dx%d",
this, reader, mi.mVideo.mDisplay.width, mi.mVideo.mDisplay.height);
}
if (mi.HasAudio()) {
MSE_DEBUG("TrackBuffer(%p): Reader %p audio sampleRate=%d channels=%d",
this, reader, mi.mAudio.mRate, mi.mAudio.mChannels);
}
MSE_DEBUG("TrackBuffer(%p): Reader %p activated", this, reader);
RegisterDecoder(aDecoder);
}
void
TrackBuffer::RegisterDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
aDecoder->SetTaskQueue(nullptr);
const MediaInfo& info = aDecoder->GetReader()->GetMediaInfo();
// Initialize the track info since this is the first decoder.
if (mDecoders.IsEmpty()) {
mHasAudio = info.HasAudio();
mHasVideo = info.HasVideo();
mParentDecoder->OnTrackBufferConfigured(this);
} else if ((info.HasAudio() && !mHasAudio) || (info.HasVideo() && !mHasVideo)) {
MSE_DEBUG("TrackBuffer(%p)::RegisterDecoder with mismatched audio/video tracks", this);
}
mDecoders.AppendElement(aDecoder);
}
void
TrackBuffer::DiscardDecoder()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
if (mCurrentDecoder) {
mCurrentDecoder->GetResource()->Ended();
}
mCurrentDecoder = nullptr;
}
void
TrackBuffer::Detach()
{
MOZ_ASSERT(NS_IsMainThread());
if (mCurrentDecoder) {
DiscardDecoder();
}
}
bool
TrackBuffer::HasInitSegment()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
return mHasAudio || mHasVideo;
}
void
TrackBuffer::LastTimestamp(double& aStart, double& aEnd)
{
MOZ_ASSERT(NS_IsMainThread());
aStart = mLastStartTimestamp;
aEnd = mLastEndTimestamp;
}
void
TrackBuffer::SetLastStartTimestamp(double aStart)
{
MOZ_ASSERT(NS_IsMainThread());
mLastStartTimestamp = aStart;
}
void
TrackBuffer::SetLastEndTimestamp(double aEnd)
{
MOZ_ASSERT(NS_IsMainThread());
mLastEndTimestamp = aEnd;
}
bool
TrackBuffer::ContainsTime(double aTime)
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
mDecoders[i]->GetBuffered(r);
if (r->Find(aTime) != dom::TimeRanges::NoIndex) {
return true;
}
}
return false;
}
bool
TrackBuffer::HasAudio()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
return mHasAudio;
}
bool
TrackBuffer::HasVideo()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
return mHasVideo;
}
void
TrackBuffer::BreakCycles()
{
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->BreakCycles();
}
mDecoders.Clear();
}
void
TrackBuffer::ResetDecode()
{
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->ResetDecode();
}
}
const nsTArray<nsRefPtr<SourceBufferDecoder>>&
TrackBuffer::Decoders()
{
// XXX assert OnDecodeThread
return mDecoders;
}
} // namespace mozilla
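
Note: the timestamp bookkeeping being removed here (TrackBuffer::LastTimestamp and the restored mLastParsedTimestamp in SourceBuffer::AppendData) implements one rule: a media segment that starts before the last parsed end time, or more than 0.1 s after it, cannot be appended to the current decoder and forces a new one. A small sketch of that rule, with a hypothetical NeedsNewDecoder helper and ignoring the IsMediaSegmentPresent check:

// Hypothetical helper; the 0.1 s gap tolerance is taken from AppendData above.
#include <cmath>
#include <cstdio>

// Returns true when the incoming segment cannot continue the current decoder's
// timeline: it either overlaps earlier data or leaves a gap larger than 100 ms.
static bool NeedsNewDecoder(double aSegmentStart, double aLastEnd)
{
  if (std::isnan(aLastEnd)) {
    return false; // nothing parsed yet, keep the current (fresh) decoder
  }
  return aSegmentStart < aLastEnd || aSegmentStart - aLastEnd > 0.1;
}

int main()
{
  std::printf("%d\n", NeedsNewDecoder(10.02, 10.0)); // 0: continuous enough
  std::printf("%d\n", NeedsNewDecoder(9.5, 10.0));   // 1: overlaps earlier data
  std::printf("%d\n", NeedsNewDecoder(12.0, 10.0));  // 1: gap larger than 0.1 s
  return 0;
}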

View File

@ -1,129 +0,0 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_TRACKBUFFER_H_
#define MOZILLA_TRACKBUFFER_H_
#include "SourceBufferDecoder.h"
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/mozalloc.h"
#include "nsCOMPtr.h"
#include "nsString.h"
#include "nscore.h"
namespace mozilla {
class MediaSourceDecoder;
namespace dom {
class TimeRanges;
} // namespace dom
class TrackBuffer MOZ_FINAL {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffer);
TrackBuffer(MediaSourceDecoder* aParentDecoder, const nsACString& aType);
void Shutdown();
// Append data to the current decoder. Also responsible for calling
// NotifyDataArrived on the decoder to keep buffered range computation up
// to date.
void AppendData(const uint8_t* aData, uint32_t aLength);
bool EvictData(uint32_t aThreshold);
void EvictBefore(double aTime);
// Returns the highest end time of all of the buffered ranges in the
// decoders managed by this TrackBuffer, and returns the union of the
// decoders buffered ranges in aRanges.
double Buffered(dom::TimeRanges* aRanges);
// Create a new decoder, set mCurrentDecoder to the new decoder, and queue
// the decoder for initialization. The decoder is not considered
// initialized until it is added to mDecoders.
bool NewDecoder();
// Mark the current decoder's resource as ended, clear mCurrentDecoder and
// reset mLast{Start,End}Timestamp.
void DiscardDecoder();
void Detach();
// Returns true if an init segment has been appended *and* the decoder
// using that init segment has successfully initialized.
bool HasInitSegment();
// Query and update mLast{Start,End}Timestamp.
void LastTimestamp(double& aStart, double& aEnd);
void SetLastStartTimestamp(double aStart);
void SetLastEndTimestamp(double aEnd);
// Returns true if any of the decoders managed by this track buffer
// contain aTime in their buffered ranges.
bool ContainsTime(double aTime);
// Returns true if this TrackBuffer has an audio or video track,
// respectively.
bool HasAudio();
bool HasVideo();
void BreakCycles();
// Call ResetDecode() on each decoder in mDecoders.
void ResetDecode();
// Returns a reference to mDecoders, used by MediaSourceReader to select
// decoders.
// TODO: Refactor to a clenaer interface between TrackBuffer and MediaSourceReader.
const nsTArray<nsRefPtr<SourceBufferDecoder>>& Decoders();
private:
~TrackBuffer();
// Queue execution of InitializeDecoder on mTaskQueue.
bool QueueInitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
// Runs decoder initialization including calling ReadMetadata. Runs as an
// event on the decode thread pool.
void InitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
// Adds a successfully initialized decoder to mDecoders and (if it's the
// first decoder initialized), initializes mHasAudio/mHasVideo. Called
// from the decode thread pool.
void RegisterDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
// A task queue using the shared media thread pool. Used exclusively to
// initialize (i.e. call ReadMetadata on) decoders as they are created via
// NewDecoder.
RefPtr<MediaTaskQueue> mTaskQueue;
// All of the initialized decoders managed by this TrackBuffer. Access
// protected by mParentDecoder's monitor.
nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
// The decoder that the owning SourceBuffer is currently appending data to.
nsRefPtr<SourceBufferDecoder> mCurrentDecoder;
nsRefPtr<MediaSourceDecoder> mParentDecoder;
const nsCString mType;
// The last start and end timestamps added to the TrackBuffer via
// AppendData. Accessed on the main thread only.
double mLastStartTimestamp;
double mLastEndTimestamp;
// Set when the first decoder used by this TrackBuffer is initialized.
// Protected by mParentDecoder's monitor.
bool mHasAudio;
bool mHasVideo;
};
} // namespace mozilla
#endif /* MOZILLA_TRACKBUFFER_H_ */

View File

@ -25,7 +25,6 @@ UNIFIED_SOURCES += [
     'SourceBufferDecoder.cpp',
     'SourceBufferList.cpp',
     'SourceBufferResource.cpp',
-    'TrackBuffer.cpp',
 ]
 
 FAIL_ON_WARNINGS = True