Bug 1059058 - Introduce abstraction to manage the mapping between SourceBuffers and SourceBufferDecoders for the MediaSourceReader. r=cajbir

Matthew Gregan 2014-08-28 20:18:01 +12:00
parent 61266720e7
commit 08408b531e
11 changed files with 759 additions and 350 deletions
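For orientation, here is a minimal standalone sketch of the ownership this patch introduces: each SourceBuffer now owns a TrackBuffer, the TrackBuffer owns the SourceBufferDecoders created for that buffer, and the MediaSourceReader tracks TrackBuffers rather than individual decoders. The class names below are simplified stand-ins, not the Gecko classes (in the real patch the TrackBuffer registers with the MediaSourceDecoder, which forwards to the reader).

// Illustrative stand-ins only; not the Gecko classes, just a model of the
// ownership introduced by this patch.
#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include <vector>

struct SourceBufferDecoder {
  std::string mType;
  explicit SourceBufferDecoder(std::string aType) : mType(std::move(aType)) {}
};

// A TrackBuffer owns the chain of decoders created for one SourceBuffer and
// tracks which decoder is currently receiving appended data.
struct TrackBuffer {
  std::string mType;
  std::vector<std::shared_ptr<SourceBufferDecoder>> mDecoders;
  std::shared_ptr<SourceBufferDecoder> mCurrentDecoder;

  explicit TrackBuffer(std::string aType) : mType(std::move(aType)) {}

  // Roughly mirrors TrackBuffer::NewDecoder(): a new init segment gets a new decoder.
  void NewDecoder() {
    mCurrentDecoder = std::make_shared<SourceBufferDecoder>(mType);
    mDecoders.push_back(mCurrentDecoder);
  }
};

// The reader no longer holds SourceBufferDecoders directly; it holds
// TrackBuffers and later picks its audio/video tracks from among them.
struct MediaSourceReader {
  std::vector<std::shared_ptr<TrackBuffer>> mTrackBuffers;
  void AddTrackBuffer(std::shared_ptr<TrackBuffer> aTrackBuffer) {
    mTrackBuffers.push_back(std::move(aTrackBuffer));
  }
};

// Each SourceBuffer owns exactly one TrackBuffer and registers it with the reader.
struct SourceBuffer {
  std::shared_ptr<TrackBuffer> mTrackBuffer;
  SourceBuffer(MediaSourceReader& aReader, const std::string& aType)
      : mTrackBuffer(std::make_shared<TrackBuffer>(aType)) {
    aReader.AddTrackBuffer(mTrackBuffer);
  }
};

int main() {
  MediaSourceReader reader;
  SourceBuffer video(reader, "video/webm");
  video.mTrackBuffer->NewDecoder();  // appending an init segment creates a decoder
  std::cout << reader.mTrackBuffers.size() << " track buffer(s), "
            << video.mTrackBuffer->mDecoders.size() << " decoder(s)\n";
  return 0;
}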

View File: MediaSourceDecoder.cpp

@ -132,6 +132,27 @@ MediaSourceDecoder::CreateSubDecoder(const nsACString& aType)
return mReader->CreateSubDecoder(aType);
}
void
MediaSourceDecoder::AddTrackBuffer(TrackBuffer* aTrackBuffer)
{
MOZ_ASSERT(mReader);
mReader->AddTrackBuffer(aTrackBuffer);
}
void
MediaSourceDecoder::RemoveTrackBuffer(TrackBuffer* aTrackBuffer)
{
MOZ_ASSERT(mReader);
mReader->RemoveTrackBuffer(aTrackBuffer);
}
void
MediaSourceDecoder::OnTrackBufferConfigured(TrackBuffer* aTrackBuffer)
{
MOZ_ASSERT(mReader);
mReader->OnTrackBufferConfigured(aTrackBuffer);
}
void
MediaSourceDecoder::Ended()
{

View File: MediaSourceDecoder.h

@ -20,6 +20,7 @@ class MediaResource;
class MediaDecoderStateMachine;
class MediaSourceReader;
class SourceBufferDecoder;
class TrackBuffer;
namespace dom {
@ -46,6 +47,9 @@ public:
void DetachMediaSource();
already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType);
void AddTrackBuffer(TrackBuffer* aTrackBuffer);
void RemoveTrackBuffer(TrackBuffer* aTrackBuffer);
void OnTrackBufferConfigured(TrackBuffer* aTrackBuffer);
void Ended();

View File: MediaSourceReader.cpp

@ -14,6 +14,7 @@
#include "MediaSourceDecoder.h"
#include "MediaSourceUtils.h"
#include "SourceBufferDecoder.h"
#include "TrackBuffer.h"
#ifdef MOZ_FMP4
#include "MP4Decoder.h"
@ -37,6 +38,8 @@ namespace mozilla {
MediaSourceReader::MediaSourceReader(MediaSourceDecoder* aDecoder)
: MediaDecoderReader(aDecoder)
, mLastAudioTime(-1)
, mLastVideoTime(-1)
, mTimeThreshold(-1)
, mDropAudioBeforeThreshold(false)
, mDropVideoBeforeThreshold(false)
@ -49,25 +52,35 @@ MediaSourceReader::MediaSourceReader(MediaSourceDecoder* aDecoder)
bool
MediaSourceReader::IsWaitingMediaResources()
{
return mDecoders.IsEmpty() && mPendingDecoders.IsEmpty();
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
if (!mTrackBuffers[i]->HasInitSegment()) {
return true;
}
}
return mTrackBuffers.IsEmpty();
}
void
MediaSourceReader::RequestAudioData()
{
MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData", this);
if (!mAudioReader) {
MSE_DEBUG("MediaSourceReader(%p)::RequestAudioData called with no audio reader", this);
MOZ_ASSERT(mPendingDecoders.IsEmpty());
GetCallback()->OnDecodeError();
return;
}
SwitchReaders(SWITCH_OPTIONAL);
if (SwitchAudioReader(double(mLastAudioTime) / USECS_PER_S)) {
MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData switching audio reader", this);
}
mAudioReader->RequestAudioData();
}
void
MediaSourceReader::OnAudioDecoded(AudioData* aSample)
{
MSE_DEBUGV("MediaSourceReader(%p)::OnAudioDecoded mTime=%lld mDuration=%lld d=%d",
this, aSample->mTime, aSample->mDuration, aSample->mDiscontinuity);
if (mDropAudioBeforeThreshold) {
if (aSample->mTime < mTimeThreshold) {
MSE_DEBUG("MediaSourceReader(%p)::OnAudioDecoded mTime=%lld < mTimeThreshold=%lld",
@ -86,21 +99,22 @@ MediaSourceReader::OnAudioDecoded(AudioData* aSample)
mAudioIsSeeking = false;
aSample->mDiscontinuity = true;
}
mLastAudioTime = aSample->mTime + aSample->mDuration;
GetCallback()->OnAudioDecoded(aSample);
}
void
MediaSourceReader::OnAudioEOS()
{
MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p (readers=%u)",
this, mAudioReader.get(), mDecoders.Length());
if (SwitchReaders(SWITCH_FORCED)) {
MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p (decoders=%u)",
this, mAudioReader.get(), mAudioTrack->Decoders().Length());
if (SwitchAudioReader(double(mLastAudioTime) / USECS_PER_S)) {
// Success! Resume decoding with next audio decoder.
RequestAudioData();
} else if (IsEnded()) {
// End of stream.
MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p EOS (readers=%u)",
this, mAudioReader.get(), mDecoders.Length());
MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p EOS (decoders=%u)",
this, mAudioReader.get(), mAudioTrack->Decoders().Length());
GetCallback()->OnAudioEOS();
}
}
@ -108,26 +122,35 @@ MediaSourceReader::OnAudioEOS()
void
MediaSourceReader::RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold)
{
MSE_DEBUGV("MediaSourceReader(%p)::RequestVideoData(%d, %lld)",
this, aSkipToNextKeyframe, aTimeThreshold);
if (!mVideoReader) {
MSE_DEBUG("MediaSourceReader(%p)::RequestVideoData called with no video reader", this);
MOZ_ASSERT(mPendingDecoders.IsEmpty());
GetCallback()->OnDecodeError();
return;
}
mTimeThreshold = aTimeThreshold;
SwitchReaders(SWITCH_OPTIONAL);
if (aSkipToNextKeyframe) {
mTimeThreshold = aTimeThreshold;
mDropAudioBeforeThreshold = true;
mDropVideoBeforeThreshold = true;
}
if (SwitchVideoReader(double(mLastVideoTime) / USECS_PER_S)) {
MSE_DEBUGV("MediaSourceReader(%p)::RequestVideoData switching video reader", this);
}
mVideoReader->RequestVideoData(aSkipToNextKeyframe, aTimeThreshold);
}
void
MediaSourceReader::OnVideoDecoded(VideoData* aSample)
{
MSE_DEBUGV("MediaSourceReader(%p)::OnVideoDecoded mTime=%lld mDuration=%lld d=%d",
this, aSample->mTime, aSample->mDuration, aSample->mDiscontinuity);
if (mDropVideoBeforeThreshold) {
if (aSample->mTime < mTimeThreshold) {
MSE_DEBUG("MediaSourceReader(%p)::OnVideoDecoded mTime=%lld < mTimeThreshold=%lld",
this, aSample->mTime, mTimeThreshold);
delete aSample;
mVideoReader->RequestVideoData(false, mTimeThreshold);
mVideoReader->RequestVideoData(false, 0);
return;
}
mDropVideoBeforeThreshold = false;
@ -140,7 +163,7 @@ MediaSourceReader::OnVideoDecoded(VideoData* aSample)
mVideoIsSeeking = false;
aSample->mDiscontinuity = true;
}
mLastVideoTime = aSample->mTime + aSample->mDuration;
GetCallback()->OnVideoDecoded(aSample);
}
@ -148,15 +171,15 @@ void
MediaSourceReader::OnVideoEOS()
{
// End of stream. See if we can switch to another video decoder.
MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p (readers=%u)",
this, mVideoReader.get(), mDecoders.Length());
if (SwitchReaders(SWITCH_FORCED)) {
MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p (decoders=%u)",
this, mVideoReader.get(), mVideoTrack->Decoders().Length());
if (SwitchVideoReader(double(mLastVideoTime) / USECS_PER_S)) {
// Success! Resume decoding with next video decoder.
RequestVideoData(false, mTimeThreshold);
RequestVideoData(false, 0);
} else if (IsEnded()) {
// End of stream.
MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p EOS (readers=%u)",
this, mVideoReader.get(), mDecoders.Length());
MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p EOS (decoders=%u)",
this, mVideoReader.get(), mVideoTrack->Decoders().Length());
GetCallback()->OnVideoEOS();
}
}
@ -164,6 +187,7 @@ MediaSourceReader::OnVideoEOS()
void
MediaSourceReader::OnDecodeError()
{
MSE_DEBUG("MediaSourceReader(%p)::OnDecodeError", this);
GetCallback()->OnDecodeError();
}
@ -171,171 +195,97 @@ void
MediaSourceReader::Shutdown()
{
MediaDecoderReader::Shutdown();
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->Shutdown();
for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
mTrackBuffers[i]->Shutdown();
}
mTrackBuffers.Clear();
}
void
MediaSourceReader::BreakCycles()
{
MediaDecoderReader::BreakCycles();
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->BreakCycles();
for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
mTrackBuffers[i]->BreakCycles();
}
mTrackBuffers.Clear();
}
bool
MediaSourceReader::SwitchAudioReader(MediaDecoderReader* aTargetReader)
MediaSourceReader::SwitchAudioReader(double aTarget)
{
if (aTargetReader == mAudioReader) {
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
// XXX: Can't handle adding an audio track after ReadMetadata yet.
if (!mAudioTrack) {
return false;
}
if (mAudioReader) {
AudioInfo targetInfo = aTargetReader->GetMediaInfo().mAudio;
auto& decoders = mAudioTrack->Decoders();
for (uint32_t i = 0; i < decoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
decoders[i]->GetBuffered(ranges);
MediaDecoderReader* newReader = decoders[i]->GetReader();
MSE_DEBUGV("MediaDecoderReader(%p)::SwitchAudioReader(%f) audioReader=%p reader=%p ranges=%s",
this, aTarget, mAudioReader.get(), newReader, DumpTimeRanges(ranges).get());
AudioInfo targetInfo = newReader->GetMediaInfo().mAudio;
AudioInfo currentInfo = mAudioReader->GetMediaInfo().mAudio;
// TODO: We can't handle switching audio formats yet.
if (currentInfo.mRate != targetInfo.mRate ||
currentInfo.mChannels != targetInfo.mChannels) {
return false;
continue;
}
mAudioReader->SetIdle();
if (ranges->Find(aTarget) != dom::TimeRanges::NoIndex) {
if (newReader->AudioQueue().AtEndOfStream()) {
continue;
}
if (mAudioReader) {
mAudioReader->SetIdle();
}
mAudioReader = newReader;
MSE_DEBUG("MediaDecoderReader(%p)::SwitchAudioReader(%f) switching to audio reader %p",
this, aTarget, mAudioReader.get());
return true;
}
}
mAudioReader = aTargetReader;
mDropAudioBeforeThreshold = true;
MSE_DEBUG("MediaDecoderReader(%p)::SwitchReaders(%p) switching audio reader",
this, mAudioReader.get());
return true;
return false;
}
bool
MediaSourceReader::SwitchVideoReader(MediaDecoderReader* aTargetReader)
MediaSourceReader::SwitchVideoReader(double aTarget)
{
if (aTargetReader == mVideoReader) {
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
// XXX: Can't handle adding a video track after ReadMetadata yet.
if (!mVideoTrack) {
return false;
}
if (mVideoReader) {
mVideoReader->SetIdle();
}
mVideoReader = aTargetReader;
mDropVideoBeforeThreshold = true;
MSE_DEBUG("MediaDecoderReader(%p)::SwitchVideoReader(%p) switching video reader",
this, mVideoReader.get());
return true;
}
bool
MediaSourceReader::SwitchReaders(SwitchType aType)
{
InitializePendingDecoders();
// This monitor must be held after the call to InitializePendingDecoders
// as that method also obtains the lock, and then attempts to exit it
// to call ReadMetadata on the readers. If we hold it before the call then
// it remains held during the ReadMetadata call causing a deadlock.
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
bool didSwitch = false;
double decodeTarget = double(mTimeThreshold) / USECS_PER_S;
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
SourceBufferDecoder* decoder = mDecoders[i];
const MediaInfo& info = decoder->GetReader()->GetMediaInfo();
auto& decoders = mVideoTrack->Decoders();
for (uint32_t i = 0; i < decoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
decoder->GetBuffered(ranges);
decoders[i]->GetBuffered(ranges);
MSE_DEBUGV("MediaDecoderReader(%p)::SwitchReaders(%d) decoder=%u (%p) discarded=%d"
" reader=%p audioReader=%p videoReader=%p"
" hasAudio=%d hasVideo=%d decodeTarget=%f ranges=%s",
this, aType, i, decoder, decoder->IsDiscarded(),
decoder->GetReader(), mAudioReader.get(), mVideoReader.get(),
info.HasAudio(), info.HasVideo(), decodeTarget,
DumpTimeRanges(ranges).get());
MediaDecoderReader* newReader = decoders[i]->GetReader();
MSE_DEBUGV("MediaDecoderReader(%p)::SwitchVideoReader(%f) videoReader=%p reader=%p ranges=%s",
this, aTarget, mVideoReader.get(), newReader, DumpTimeRanges(ranges).get());
if (decoder->IsDiscarded()) {
continue;
}
if (aType == SWITCH_FORCED || ranges->Find(decodeTarget) != dom::TimeRanges::NoIndex) {
if (info.HasAudio()) {
didSwitch |= SwitchAudioReader(mDecoders[i]->GetReader());
if (ranges->Find(aTarget) != dom::TimeRanges::NoIndex) {
if (newReader->VideoQueue().AtEndOfStream()) {
continue;
}
if (info.HasVideo()) {
didSwitch |= SwitchVideoReader(mDecoders[i]->GetReader());
if (mVideoReader) {
mVideoReader->SetIdle();
}
mVideoReader = newReader;
MSE_DEBUG("MediaDecoderReader(%p)::SwitchVideoReader(%f) switching to video reader %p",
this, aTarget, mVideoReader.get());
return true;
}
}
return didSwitch;
}
class ReleaseDecodersTask : public nsRunnable {
public:
explicit ReleaseDecodersTask(nsTArray<nsRefPtr<SourceBufferDecoder>>& aDecoders)
{
mDecoders.SwapElements(aDecoders);
}
NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL {
mDecoders.Clear();
return NS_OK;
}
private:
nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
};
void
MediaSourceReader::InitializePendingDecoders()
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
for (uint32_t i = 0; i < mPendingDecoders.Length(); ++i) {
nsRefPtr<SourceBufferDecoder> decoder = mPendingDecoders[i];
MediaDecoderReader* reader = decoder->GetReader();
MSE_DEBUG("MediaSourceReader(%p): Initializing subdecoder %p reader %p",
this, decoder.get(), reader);
MediaInfo mi;
nsAutoPtr<MetadataTags> tags; // TODO: Handle metadata.
nsresult rv;
{
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
rv = reader->ReadMetadata(&mi, getter_Transfers(tags));
}
reader->SetIdle();
if (NS_FAILED(rv)) {
// XXX: Need to signal error back to owning SourceBuffer.
MSE_DEBUG("MediaSourceReader(%p): Reader %p failed to initialize rv=%x", this, reader, rv);
continue;
}
bool active = false;
if (mi.HasVideo() || mi.HasAudio()) {
MSE_DEBUG("MediaSourceReader(%p): Reader %p has video=%d audio=%d",
this, reader, mi.HasVideo(), mi.HasAudio());
if (mi.HasVideo()) {
MSE_DEBUG("MediaSourceReader(%p): Reader %p video resolution=%dx%d",
this, reader, mi.mVideo.mDisplay.width, mi.mVideo.mDisplay.height);
}
if (mi.HasAudio()) {
MSE_DEBUG("MediaSourceReader(%p): Reader %p audio sampleRate=%d channels=%d",
this, reader, mi.mAudio.mRate, mi.mAudio.mChannels);
}
active = true;
}
if (active) {
mDecoders.AppendElement(decoder);
} else {
MSE_DEBUG("MediaSourceReader(%p): Reader %p not activated", this, reader);
}
}
NS_DispatchToMainThread(new ReleaseDecodersTask(mPendingDecoders));
MOZ_ASSERT(mPendingDecoders.IsEmpty());
mDecoder->NotifyWaitingForResourcesStatusChanged();
return false;
}
MediaDecoderReader*
@ -376,22 +326,51 @@ MediaSourceReader::CreateSubDecoder(const nsACString& aType)
reader->SetCallback(callback);
reader->SetTaskQueue(GetTaskQueue());
reader->Init(nullptr);
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
MSE_DEBUG("MediaSourceReader(%p)::CreateSubDecoder subdecoder %p subreader %p",
this, decoder.get(), reader.get());
decoder->SetReader(reader);
mPendingDecoders.AppendElement(decoder);
RefPtr<nsIRunnable> task =
NS_NewRunnableMethod(this, &MediaSourceReader::InitializePendingDecoders);
if (NS_FAILED(GetTaskQueue()->Dispatch(task))) {
MSE_DEBUG("MediaSourceReader(%p): Failed to enqueue decoder initialization task", this);
return nullptr;
}
mDecoder->NotifyWaitingForResourcesStatusChanged();
return decoder.forget();
}
namespace {
void
MediaSourceReader::AddTrackBuffer(TrackBuffer* aTrackBuffer)
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
MSE_DEBUG("MediaSourceReader(%p)::AddTrackBuffer %p", this, aTrackBuffer);
mTrackBuffers.AppendElement(aTrackBuffer);
}
void
MediaSourceReader::RemoveTrackBuffer(TrackBuffer* aTrackBuffer)
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
MSE_DEBUG("MediaSourceReader(%p)::RemoveTrackBuffer %p", this, aTrackBuffer);
mTrackBuffers.RemoveElement(aTrackBuffer);
if (mAudioTrack == aTrackBuffer) {
mAudioTrack = nullptr;
}
if (mVideoTrack == aTrackBuffer) {
mVideoTrack = nullptr;
}
}
void
MediaSourceReader::OnTrackBufferConfigured(TrackBuffer* aTrackBuffer)
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
MOZ_ASSERT(mTrackBuffers.Contains(aTrackBuffer));
if (aTrackBuffer->HasAudio() && !mAudioTrack) {
MSE_DEBUG("MediaSourceReader(%p)::OnTrackBufferConfigured %p audio", this, aTrackBuffer);
mAudioTrack = aTrackBuffer;
}
if (aTrackBuffer->HasVideo() && !mVideoTrack) {
MSE_DEBUG("MediaSourceReader(%p)::OnTrackBufferConfigured %p video", this, aTrackBuffer);
mVideoTrack = aTrackBuffer;
}
mDecoder->NotifyWaitingForResourcesStatusChanged();
}
class ChangeToHaveMetadata : public nsRunnable {
public:
explicit ChangeToHaveMetadata(AbstractMediaDecoder* aDecoder) :
@ -410,23 +389,18 @@ public:
private:
nsRefPtr<AbstractMediaDecoder> mDecoder;
};
}
bool
MediaSourceReader::DecodersContainTime(double aTime)
MediaSourceReader::TrackBuffersContainTime(double aTime)
{
bool found = false;
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
if (!mDecoders[i]->IsDiscarded()) {
if (!mDecoders[i]->ContainsTime(aTime)) {
// No use to continue searching, one source buffer isn't ready yet
return false;
}
found = true;
}
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
if (mAudioTrack && !mAudioTrack->ContainsTime(aTime)) {
return false;
}
return found;
if (mVideoTrack && !mVideoTrack->ContainsTime(aTime)) {
return false;
}
return true;
}
nsresult
@ -435,8 +409,17 @@ MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
{
MSE_DEBUG("MediaSourceReader(%p)::Seek(aTime=%lld, aStart=%lld, aEnd=%lld, aCurrent=%lld)",
this, aTime, aStartTime, aEndTime, aCurrentTime);
ResetDecode();
for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
mTrackBuffers[i]->ResetDecode();
}
// Decoding discontinuity upon seek, reset last times to seek target.
mLastAudioTime = aTime;
mLastVideoTime = aTime;
double target = static_cast<double>(aTime) / USECS_PER_S;
if (!DecodersContainTime(target)) {
if (!TrackBuffersContainTime(target)) {
MSE_DEBUG("MediaSourceReader(%p)::Seek no active buffer contains target=%f", this, target);
NS_DispatchToMainThread(new ChangeToHaveMetadata(mDecoder));
}
@ -444,28 +427,30 @@ MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
// Loop until we have the requested time range in the source buffers.
// This is a workaround for our lack of async functionality in the
// MediaDecoderStateMachine. Bug 979104 implements what we need and
// we'll remove this for an async approach based on that in bug XXXXXXX.
while (!DecodersContainTime(target) && !IsShutdown() && !IsEnded()) {
// we'll remove this for an async approach based on that in bug 1056441.
while (!TrackBuffersContainTime(target) && !IsShutdown() && !IsEnded()) {
MSE_DEBUG("MediaSourceReader(%p)::Seek waiting for target=%f", this, target);
static_cast<MediaSourceDecoder*>(mDecoder)->WaitForData();
SwitchReaders(SWITCH_FORCED);
}
if (IsShutdown()) {
return NS_ERROR_FAILURE;
}
ResetDecode();
if (mAudioReader) {
if (mAudioTrack) {
mAudioIsSeeking = true;
DebugOnly<bool> ok = SwitchAudioReader(target);
MOZ_ASSERT(ok && static_cast<SourceBufferDecoder*>(mAudioReader->GetDecoder())->ContainsTime(target));
nsresult rv = mAudioReader->Seek(aTime, aStartTime, aEndTime, aCurrentTime);
MSE_DEBUG("MediaSourceReader(%p)::Seek audio reader=%p rv=%x", this, mAudioReader.get(), rv);
if (NS_FAILED(rv)) {
return rv;
}
}
if (mVideoReader) {
if (mVideoTrack) {
mVideoIsSeeking = true;
DebugOnly<bool> ok = SwitchVideoReader(target);
MOZ_ASSERT(ok && static_cast<SourceBufferDecoder*>(mVideoReader->GetDecoder())->ContainsTime(target));
nsresult rv = mVideoReader->Seek(aTime, aStartTime, aEndTime, aCurrentTime);
MSE_DEBUG("MediaSourceReader(%p)::Seek video reader=%p rv=%x", this, mVideoReader.get(), rv);
if (NS_FAILED(rv)) {
@ -478,39 +463,41 @@ MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
nsresult
MediaSourceReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags)
{
InitializePendingDecoders();
MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata tracks=%u", this, mTrackBuffers.Length());
// ReadMetadata is called *before* checking IsWaitingMediaResources.
if (IsWaitingMediaResources()) {
return NS_OK;
}
if (!mAudioTrack || !mVideoTrack) {
MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata missing track: mAudioTrack=%p mVideoTrack=%p",
this, mAudioTrack.get(), mVideoTrack.get());
return NS_ERROR_FAILURE;
}
MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata decoders=%u", this, mDecoders.Length());
// XXX: Make subdecoder setup async, so that use cases like bug 989888 can
// work. This will require teaching the state machine about dynamic track
// changes (and multiple tracks).
// Shorter term, make this block until we've got at least one video track
// and lie about having an audio track, then resample/remix as necessary
// to match any audio track added later to fit the format we lied about
// now. For now we just configure what we've got and cross our fingers.
int64_t maxDuration = -1;
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
MediaDecoderReader* reader = mDecoders[i]->GetReader();
MediaInfo mi = reader->GetMediaInfo();
if (mAudioTrack) {
MOZ_ASSERT(mAudioTrack->HasInitSegment());
mAudioReader = mAudioTrack->Decoders()[0]->GetReader();
if (mi.HasVideo() && !mInfo.HasVideo()) {
MOZ_ASSERT(!mVideoReader);
mVideoReader = reader;
mInfo.mVideo = mi.mVideo;
maxDuration = std::max(maxDuration, mDecoders[i]->GetMediaDuration());
MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata video reader=%p maxDuration=%lld",
this, reader, maxDuration);
}
if (mi.HasAudio() && !mInfo.HasAudio()) {
MOZ_ASSERT(!mAudioReader);
mAudioReader = reader;
mInfo.mAudio = mi.mAudio;
maxDuration = std::max(maxDuration, mDecoders[i]->GetMediaDuration());
MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata audio reader=%p maxDuration=%lld",
this, reader, maxDuration);
}
const MediaInfo& info = mAudioReader->GetMediaInfo();
MOZ_ASSERT(info.HasAudio());
mInfo.mAudio = info.mAudio;
maxDuration = std::max(maxDuration, mAudioReader->GetDecoder()->GetMediaDuration());
MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata audio reader=%p maxDuration=%lld",
this, mAudioReader.get(), maxDuration);
}
if (mVideoTrack) {
MOZ_ASSERT(mVideoTrack->HasInitSegment());
mVideoReader = mVideoTrack->Decoders()[0]->GetReader();
const MediaInfo& info = mVideoReader->GetMediaInfo();
MOZ_ASSERT(info.HasVideo());
mInfo.mVideo = info.mVideo;
maxDuration = std::max(maxDuration, mVideoReader->GetDecoder()->GetMediaDuration());
MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata video reader=%p maxDuration=%lld",
this, mVideoReader.get(), maxDuration);
}
if (maxDuration != -1) {

View File: MediaSourceReader.h

@ -19,6 +19,7 @@ namespace mozilla {
class MediaSourceDecoder;
class SourceBufferDecoder;
class TrackBuffer;
namespace dom {
@ -70,22 +71,25 @@ public:
nsresult ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) MOZ_OVERRIDE;
nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
int64_t aCurrentTime) MOZ_OVERRIDE;
already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType);
void AddTrackBuffer(TrackBuffer* aTrackBuffer);
void RemoveTrackBuffer(TrackBuffer* aTrackBuffer);
void OnTrackBufferConfigured(TrackBuffer* aTrackBuffer);
void Shutdown();
virtual void BreakCycles();
void InitializePendingDecoders();
bool IsShutdown()
{
ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
return mDecoder->IsShutdown();
}
// Return true if any of the active decoders contain data for the given time
bool DecodersContainTime(double aTime);
// Return true if all of the active tracks contain data for the specified time.
bool TrackBuffersContainTime(double aTime);
// Mark the reader to indicate that EndOfStream has been called on our MediaSource
void Ended();
@ -94,27 +98,24 @@ public:
bool IsEnded();
private:
enum SwitchType {
SWITCH_OPTIONAL,
SWITCH_FORCED
};
bool SwitchReaders(SwitchType aType);
bool SwitchAudioReader(MediaDecoderReader* aTargetReader);
bool SwitchVideoReader(MediaDecoderReader* aTargetReader);
// These are read and written on the decode task queue threads.
int64_t mTimeThreshold;
bool mDropAudioBeforeThreshold;
bool mDropVideoBeforeThreshold;
nsTArray<nsRefPtr<SourceBufferDecoder>> mPendingDecoders;
nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
bool SwitchAudioReader(double aTarget);
bool SwitchVideoReader(double aTarget);
nsRefPtr<MediaDecoderReader> mAudioReader;
nsRefPtr<MediaDecoderReader> mVideoReader;
nsTArray<nsRefPtr<TrackBuffer>> mTrackBuffers;
nsRefPtr<TrackBuffer> mAudioTrack;
nsRefPtr<TrackBuffer> mVideoTrack;
// These are read and written on the decode task queue threads.
int64_t mLastAudioTime;
int64_t mLastVideoTime;
int64_t mTimeThreshold;
bool mDropAudioBeforeThreshold;
bool mDropVideoBeforeThreshold;
bool mEnded;
// For a seek to complete we need to send a sample with

View File: SourceBuffer.cpp

@ -6,14 +6,14 @@
#include "SourceBuffer.h"
#include "AsyncEventRunner.h"
#include "DecoderTraits.h"
#include "MediaDecoder.h"
#include "MediaSourceDecoder.h"
#include "MediaSourceUtils.h"
#include "SourceBufferResource.h"
#include "TrackBuffer.h"
#include "VideoUtils.h"
#include "WebMBufferedParser.h"
#include "mozilla/Endian.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/Preferences.h"
#include "mozilla/dom/MediaSourceBinding.h"
#include "mozilla/dom/TimeRanges.h"
#include "mp4_demuxer/BufferStream.h"
@ -23,10 +23,6 @@
#include "nsIRunnable.h"
#include "nsThreadUtils.h"
#include "prlog.h"
#include "SourceBufferDecoder.h"
#include "mozilla/Preferences.h"
#include "WebMBufferedParser.h"
struct JSContext;
class JSObject;
@ -335,19 +331,8 @@ SourceBuffer::GetBuffered(ErrorResult& aRv)
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return nullptr;
}
double highestEndTime = 0;
nsRefPtr<TimeRanges> ranges = new TimeRanges();
// TODO: Need to adjust mDecoders so it only tracks active decoders.
// Once we have an abstraction for track buffers, this needs to report the
// intersection of buffered ranges within those track buffers.
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
nsRefPtr<TimeRanges> r = new TimeRanges();
mDecoders[i]->GetBuffered(r);
if (r->Length() > 0) {
highestEndTime = std::max(highestEndTime, r->GetEndTime());
ranges->Union(r);
}
}
double highestEndTime = mTrackBuffer->Buffered(ranges);
if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
// Set the end time on the last range to highestEndTime by adding a
// new range spanning the current end time to highestEndTime, which
@ -432,7 +417,7 @@ SourceBuffer::Abort(ErrorResult& aRv)
mAppendWindowEnd = PositiveInfinity<double>();
MSE_DEBUG("SourceBuffer(%p)::Abort() Discarding decoder", this);
DiscardDecoder();
mTrackBuffer->DiscardDecoder();
}
void
@ -464,8 +449,10 @@ SourceBuffer::Detach()
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("SourceBuffer(%p)::Detach", this);
Ended();
DiscardDecoder();
if (mTrackBuffer) {
mTrackBuffer->Detach();
}
mTrackBuffer = nullptr;
mMediaSource = nullptr;
}
@ -473,36 +460,34 @@ void
SourceBuffer::Ended()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(IsAttached());
MSE_DEBUG("SourceBuffer(%p)::Ended", this);
if (mDecoder) {
mDecoder->GetResource()->Ended();
}
mTrackBuffer->DiscardDecoder();
}
SourceBuffer::SourceBuffer(MediaSource* aMediaSource, const nsACString& aType)
: DOMEventTargetHelper(aMediaSource->GetParentObject())
, mMediaSource(aMediaSource)
, mType(aType)
, mLastParsedTimestamp(UnspecifiedNaN<double>())
, mAppendWindowStart(0)
, mAppendWindowEnd(PositiveInfinity<double>())
, mTimestampOffset(0)
, mAppendMode(SourceBufferAppendMode::Segments)
, mUpdating(false)
, mDecoderInitialized(false)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aMediaSource);
mParser = ContainerParser::CreateForMIMEType(aType);
MSE_DEBUG("SourceBuffer(%p)::SourceBuffer: Creating initial decoder, mParser=%p", this, mParser.get());
InitNewDecoder();
mTrackBuffer = new TrackBuffer(aMediaSource->GetDecoder(), aType);
MSE_DEBUG("SourceBuffer(%p)::SourceBuffer: Create mParser=%p mTrackBuffer=%p",
this, mParser.get(), mTrackBuffer.get());
}
SourceBuffer::~SourceBuffer()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mMediaSource);
MSE_DEBUG("SourceBuffer(%p)::~SourceBuffer", this);
DiscardDecoder();
}
MediaSource*
@ -533,37 +518,6 @@ SourceBuffer::QueueAsyncSimpleEvent(const char* aName)
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
}
bool
SourceBuffer::InitNewDecoder()
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("SourceBuffer(%p)::InitNewDecoder", this);
MOZ_ASSERT(!mDecoder);
MediaSourceDecoder* parentDecoder = mMediaSource->GetDecoder();
nsRefPtr<SourceBufferDecoder> decoder = parentDecoder->CreateSubDecoder(mType);
if (!decoder) {
return false;
}
mDecoder = decoder;
mDecoderInitialized = false;
mDecoders.AppendElement(mDecoder);
return true;
}
void
SourceBuffer::DiscardDecoder()
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("SourceBuffer(%p)::DiscardDecoder mDecoder=%p", this, mDecoder.get());
if (mDecoder) {
mDecoder->SetDiscarded();
}
mDecoder = nullptr;
mDecoderInitialized = false;
// XXX: Parser reset may be required?
mLastParsedTimestamp = UnspecifiedNaN<double>();
}
void
SourceBuffer::StartUpdating()
{
@ -610,20 +564,13 @@ SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aR
// TODO: Run buffer append algorithm asynchronously (would call StopUpdating()).
if (mParser->IsInitSegmentPresent(aData, aLength)) {
MSE_DEBUG("SourceBuffer(%p)::AppendData: New initialization segment.", this);
if (mDecoderInitialized) {
// Existing decoder has been used, time for a new one.
DiscardDecoder();
}
// If we've got a decoder here, it's not initialized, so we can use it
// rather than creating a new one.
if (!mDecoder && !InitNewDecoder()) {
mTrackBuffer->DiscardDecoder();
if (!mTrackBuffer->NewDecoder()) {
aRv.Throw(NS_ERROR_FAILURE); // XXX: Review error handling.
return;
}
MSE_DEBUG("SourceBuffer(%p)::AppendData: Decoder marked as initialized.", this);
mDecoderInitialized = true;
} else if (!mDecoderInitialized) {
} else if (!mTrackBuffer->HasInitSegment()) {
MSE_DEBUG("SourceBuffer(%p)::AppendData: Non-init segment appended during initialization.");
Optional<MediaSourceEndOfStreamError> decodeError(MediaSourceEndOfStreamError::Decode);
ErrorResult dummy;
@ -633,37 +580,33 @@ SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aR
}
double start, end;
if (mParser->ParseStartAndEndTimestamps(aData, aLength, start, end)) {
double lastStart, lastEnd;
mTrackBuffer->LastTimestamp(lastStart, lastEnd);
if (mParser->IsMediaSegmentPresent(aData, aLength) &&
(start < mLastParsedTimestamp || start - mLastParsedTimestamp > 0.1)) {
MSE_DEBUG("SourceBuffer(%p)::AppendData: Data (%f, %f) overlaps %f.",
this, start, end, mLastParsedTimestamp);
(start < lastEnd || start - lastEnd > 0.1)) {
MSE_DEBUG("SourceBuffer(%p)::AppendData: Data last=[%f, %f] overlaps [%f, %f]",
this, lastStart, lastEnd, start, end);
// This data is earlier in the timeline than data we have already
// processed, so we must create a new decoder to handle the decoding.
DiscardDecoder();
mTrackBuffer->DiscardDecoder();
// If we've got a decoder here, it's not initialized, so we can use it
// rather than creating a new one.
if (!InitNewDecoder()) {
if (!mTrackBuffer->NewDecoder()) {
aRv.Throw(NS_ERROR_FAILURE); // XXX: Review error handling.
return;
}
MSE_DEBUG("SourceBuffer(%p)::AppendData: Decoder marked as initialized.", this);
mDecoderInitialized = true;
const nsTArray<uint8_t>& initData = mParser->InitData();
mDecoder->NotifyDataArrived(reinterpret_cast<const char*>(initData.Elements()),
initData.Length(),
0);
mDecoder->GetResource()->AppendData(initData.Elements(), initData.Length());
mTrackBuffer->AppendData(initData.Elements(), initData.Length());
mTrackBuffer->SetLastStartTimestamp(start);
}
mLastParsedTimestamp = end;
MSE_DEBUG("SourceBuffer(%p)::AppendData: Segment start=%f end=%f", this, start, end);
mTrackBuffer->SetLastEndTimestamp(end);
MSE_DEBUG("SourceBuffer(%p)::AppendData: Segment last=[%f, %f] [%f, %f]",
this, lastStart, lastEnd, start, end);
}
// XXX: For future reference: NDA call must run on the main thread.
mDecoder->NotifyDataArrived(reinterpret_cast<const char*>(aData),
aLength,
mDecoder->GetResource()->GetLength());
mDecoder->GetResource()->AppendData(aData, aLength);
mTrackBuffer->AppendData(aData, aLength);
// Eviction uses a byte threshold. If the buffer is greater than the
// number of bytes then data is evicted. The time range for this
@ -673,7 +616,7 @@ SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aR
// TODO: Make the eviction threshold smaller for audio-only streams.
// TODO: Drive evictions off memory pressure notifications.
const uint32_t evict_threshold = 75 * (1 << 20);
bool evicted = mDecoder->GetResource()->EvictData(evict_threshold);
bool evicted = mTrackBuffer->EvictData(evict_threshold);
if (evicted) {
MSE_DEBUG("SourceBuffer(%p)::AppendData Evict; current buffered start=%f",
this, GetBufferedStart());
@ -714,20 +657,13 @@ SourceBuffer::Evict(double aStart, double aEnd)
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("SourceBuffer(%p)::Evict(aStart=%f, aEnd=%f)", this, aStart, aEnd);
if (!mDecoder) {
return;
}
double currentTime = mMediaSource->GetDecoder()->GetCurrentTime();
double evictTime = aEnd;
const double safety_threshold = 5;
if (currentTime + safety_threshold >= evictTime) {
evictTime -= safety_threshold;
}
int64_t endOffset = mDecoder->ConvertToByteOffset(evictTime);
if (endOffset > 0) {
mDecoder->GetResource()->EvictBefore(endOffset);
}
MSE_DEBUG("SourceBuffer(%p)::Evict offset=%lld", this, endOffset);
mTrackBuffer->EvictBefore(evictTime);
}
NS_IMPL_CYCLE_COLLECTION_INHERITED(SourceBuffer, DOMEventTargetHelper,

View File: SourceBuffer.h

@ -7,14 +7,13 @@
#ifndef mozilla_dom_SourceBuffer_h_
#define mozilla_dom_SourceBuffer_h_
#include "MediaDecoderReader.h"
#include "MediaSource.h"
#include "js/RootingAPI.h"
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/dom/SourceBufferBinding.h"
#include "mozilla/dom/TypedArray.h"
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/mozalloc.h"
#include "nsAutoPtr.h"
#include "nsCOMPtr.h"
@ -31,8 +30,7 @@ namespace mozilla {
class ContainerParser;
class ErrorResult;
class SourceBufferResource;
class SourceBufferDecoder;
class TrackBuffer;
template <typename T> class AsyncEventRunner;
namespace dom {
@ -139,10 +137,7 @@ private:
nsAutoPtr<ContainerParser> mParser;
double mLastParsedTimestamp;
nsRefPtr<SourceBufferDecoder> mDecoder;
nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
nsRefPtr<TrackBuffer> mTrackBuffer;
double mAppendWindowStart;
double mAppendWindowEnd;
@ -151,8 +146,6 @@ private:
SourceBufferAppendMode mAppendMode;
bool mUpdating;
bool mDecoderInitialized;
};
} // namespace dom

View File: SourceBufferDecoder.cpp

@ -38,7 +38,6 @@ SourceBufferDecoder::SourceBufferDecoder(MediaResource* aResource,
, mParentDecoder(aParentDecoder)
, mReader(nullptr)
, mMediaDuration(-1)
, mDiscarded(false)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_COUNT_CTOR(SourceBufferDecoder);
@ -147,6 +146,10 @@ SourceBufferDecoder::OnStateMachineThread() const
bool
SourceBufferDecoder::OnDecodeThread() const
{
// During initialization we run on our TrackBuffer's task queue.
if (mTaskQueue) {
return mTaskQueue->IsCurrentThreadIn();
}
return mParentDecoder->OnDecodeThread();
}

View File: SourceBufferDecoder.h

@ -8,9 +8,10 @@
#define MOZILLA_SOURCEBUFFERDECODER_H_
#include "AbstractMediaDecoder.h"
#include "MediaDecoderReader.h"
#include "SourceBufferResource.h"
#include "mozilla/Attributes.h"
#include "mozilla/ReentrantMonitor.h"
#include "SourceBufferResource.h"
namespace mozilla {
@ -74,33 +75,30 @@ public:
return mReader;
}
void SetTaskQueue(MediaTaskQueue* aTaskQueue)
{
MOZ_ASSERT((!mTaskQueue && aTaskQueue) || (mTaskQueue && !aTaskQueue));
mTaskQueue = aTaskQueue;
}
// Given a time convert it into an approximate byte offset from the
// cached data. Returns -1 if no such value is computable.
int64_t ConvertToByteOffset(double aTime);
bool IsDiscarded()
{
return mDiscarded;
}
void SetDiscarded()
{
GetResource()->Ended();
mDiscarded = true;
}
// Returns true if the data buffered by this decoder contains the given time.
bool ContainsTime(double aTime);
private:
virtual ~SourceBufferDecoder();
// Our TrackBuffer's task queue, this is only non-null during initialization.
RefPtr<MediaTaskQueue> mTaskQueue;
nsRefPtr<MediaResource> mResource;
AbstractMediaDecoder* mParentDecoder;
nsRefPtr<MediaDecoderReader> mReader;
int64_t mMediaDuration;
bool mDiscarded;
};
} // namespace mozilla

View File: TrackBuffer.cpp (new file)

@ -0,0 +1,336 @@
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "TrackBuffer.h"
#include "MediaSourceDecoder.h"
#include "SharedThreadPool.h"
#include "MediaTaskQueue.h"
#include "SourceBufferDecoder.h"
#include "SourceBufferResource.h"
#include "VideoUtils.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/dom/MediaSourceBinding.h"
#include "mozilla/dom/TimeRanges.h"
#include "nsError.h"
#include "nsIRunnable.h"
#include "nsThreadUtils.h"
#include "prlog.h"
struct JSContext;
class JSObject;
#ifdef PR_LOGGING
extern PRLogModuleInfo* GetMediaSourceLog();
extern PRLogModuleInfo* GetMediaSourceAPILog();
#define MSE_DEBUG(...) PR_LOG(GetMediaSourceLog(), PR_LOG_DEBUG, (__VA_ARGS__))
#define MSE_DEBUGV(...) PR_LOG(GetMediaSourceLog(), PR_LOG_DEBUG+1, (__VA_ARGS__))
#define MSE_API(...) PR_LOG(GetMediaSourceAPILog(), PR_LOG_DEBUG, (__VA_ARGS__))
#else
#define MSE_DEBUG(...)
#define MSE_DEBUGV(...)
#define MSE_API(...)
#endif
namespace mozilla {
TrackBuffer::TrackBuffer(MediaSourceDecoder* aParentDecoder, const nsACString& aType)
: mParentDecoder(aParentDecoder)
, mType(aType)
, mLastStartTimestamp(0)
, mLastEndTimestamp(UnspecifiedNaN<double>())
, mHasAudio(false)
, mHasVideo(false)
{
MOZ_COUNT_CTOR(TrackBuffer);
mTaskQueue = new MediaTaskQueue(GetMediaDecodeThreadPool());
aParentDecoder->AddTrackBuffer(this);
}
TrackBuffer::~TrackBuffer()
{
MOZ_COUNT_DTOR(TrackBuffer);
}
class ReleaseDecoderTask : public nsRunnable {
public:
explicit ReleaseDecoderTask(nsRefPtr<SourceBufferDecoder> aDecoder)
{
mDecoders.AppendElement(aDecoder);
}
explicit ReleaseDecoderTask(nsTArray<nsRefPtr<SourceBufferDecoder>>& aDecoders)
{
mDecoders.SwapElements(aDecoders);
}
NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL {
mDecoders.Clear();
return NS_OK;
}
private:
nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
};
void
TrackBuffer::Shutdown()
{
// Shutdown waits for any pending events, which may require the monitor,
// so we must not hold the monitor during this call.
mTaskQueue->Shutdown();
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
DiscardDecoder();
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->Shutdown();
}
NS_DispatchToMainThread(new ReleaseDecoderTask(mDecoders));
MOZ_ASSERT(mDecoders.IsEmpty());
}
void
TrackBuffer::AppendData(const uint8_t* aData, uint32_t aLength)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mCurrentDecoder);
SourceBufferResource* resource = mCurrentDecoder->GetResource();
// XXX: For future reference: NDA call must run on the main thread.
mCurrentDecoder->NotifyDataArrived(reinterpret_cast<const char*>(aData),
aLength, resource->GetLength());
resource->AppendData(aData, aLength);
}
bool
TrackBuffer::EvictData(uint32_t aThreshold)
{
MOZ_ASSERT(NS_IsMainThread());
// XXX Call EvictData on mDecoders?
return mCurrentDecoder->GetResource()->EvictData(aThreshold);
}
void
TrackBuffer::EvictBefore(double aTime)
{
MOZ_ASSERT(NS_IsMainThread());
// XXX Call EvictBefore on mDecoders?
int64_t endOffset = mCurrentDecoder->ConvertToByteOffset(aTime);
if (endOffset > 0) {
mCurrentDecoder->GetResource()->EvictBefore(endOffset);
}
MSE_DEBUG("TrackBuffer(%p)::EvictBefore offset=%lld", this, endOffset);
}
double
TrackBuffer::Buffered(dom::TimeRanges* aRanges)
{
MOZ_ASSERT(NS_IsMainThread());
// XXX check default if mDecoders empty?
double highestEndTime = 0;
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
mDecoders[i]->GetBuffered(r);
if (r->Length() > 0) {
highestEndTime = std::max(highestEndTime, r->GetEndTime());
aRanges->Union(r);
}
}
return highestEndTime;
}
bool
TrackBuffer::NewDecoder()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mCurrentDecoder && mParentDecoder);
nsRefPtr<SourceBufferDecoder> decoder = mParentDecoder->CreateSubDecoder(mType);
if (!decoder) {
return false;
}
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
mCurrentDecoder = decoder;
mLastStartTimestamp = 0;
mLastEndTimestamp = UnspecifiedNaN<double>();
return QueueInitializeDecoder(decoder);
}
bool
TrackBuffer::QueueInitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
{
RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<nsRefPtr<SourceBufferDecoder>>(this,
&TrackBuffer::InitializeDecoder,
aDecoder);
aDecoder->SetTaskQueue(mTaskQueue);
if (NS_FAILED(mTaskQueue->Dispatch(task))) {
MSE_DEBUG("MediaSourceReader(%p): Failed to enqueue decoder initialization task", this);
return false;
}
return true;
}
void
TrackBuffer::InitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
{
// ReadMetadata may block the thread waiting on data, so it must not be
// called with the monitor held.
mParentDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
MediaDecoderReader* reader = aDecoder->GetReader();
MSE_DEBUG("TrackBuffer(%p): Initializing subdecoder %p reader %p",
this, aDecoder.get(), reader);
MediaInfo mi;
nsAutoPtr<MetadataTags> tags; // TODO: Handle metadata.
nsresult rv = reader->ReadMetadata(&mi, getter_Transfers(tags));
reader->SetIdle();
if (NS_FAILED(rv) || (!mi.HasVideo() && !mi.HasAudio())) {
// XXX: Need to signal error back to owning SourceBuffer.
MSE_DEBUG("TrackBuffer(%p): Reader %p failed to initialize rv=%x audio=%d video=%d",
this, reader, rv, mi.HasAudio(), mi.HasVideo());
aDecoder->SetTaskQueue(nullptr);
NS_DispatchToMainThread(new ReleaseDecoderTask(aDecoder));
return;
}
if (mi.HasVideo()) {
MSE_DEBUG("TrackBuffer(%p): Reader %p video resolution=%dx%d",
this, reader, mi.mVideo.mDisplay.width, mi.mVideo.mDisplay.height);
}
if (mi.HasAudio()) {
MSE_DEBUG("TrackBuffer(%p): Reader %p audio sampleRate=%d channels=%d",
this, reader, mi.mAudio.mRate, mi.mAudio.mChannels);
}
MSE_DEBUG("TrackBuffer(%p): Reader %p activated", this, reader);
RegisterDecoder(aDecoder);
}
void
TrackBuffer::RegisterDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
aDecoder->SetTaskQueue(nullptr);
const MediaInfo& info = aDecoder->GetReader()->GetMediaInfo();
// Initialize the track info since this is the first decoder.
if (mDecoders.IsEmpty()) {
mHasAudio = info.HasAudio();
mHasVideo = info.HasVideo();
mParentDecoder->OnTrackBufferConfigured(this);
} else if ((info.HasAudio() && !mHasAudio) || (info.HasVideo() && !mHasVideo)) {
MSE_DEBUG("TrackBuffer(%p)::RegisterDecoder with mismatched audio/video tracks", this);
}
mDecoders.AppendElement(aDecoder);
}
void
TrackBuffer::DiscardDecoder()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
if (mCurrentDecoder) {
mCurrentDecoder->GetResource()->Ended();
}
mCurrentDecoder = nullptr;
}
void
TrackBuffer::Detach()
{
MOZ_ASSERT(NS_IsMainThread());
if (mCurrentDecoder) {
DiscardDecoder();
}
}
bool
TrackBuffer::HasInitSegment()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
return mHasAudio || mHasVideo;
}
void
TrackBuffer::LastTimestamp(double& aStart, double& aEnd)
{
MOZ_ASSERT(NS_IsMainThread());
aStart = mLastStartTimestamp;
aEnd = mLastEndTimestamp;
}
void
TrackBuffer::SetLastStartTimestamp(double aStart)
{
MOZ_ASSERT(NS_IsMainThread());
mLastStartTimestamp = aStart;
}
void
TrackBuffer::SetLastEndTimestamp(double aEnd)
{
MOZ_ASSERT(NS_IsMainThread());
mLastEndTimestamp = aEnd;
}
bool
TrackBuffer::ContainsTime(double aTime)
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
mDecoders[i]->GetBuffered(r);
if (r->Find(aTime) != dom::TimeRanges::NoIndex) {
return true;
}
}
return false;
}
bool
TrackBuffer::HasAudio()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
return mHasAudio;
}
bool
TrackBuffer::HasVideo()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
return mHasVideo;
}
void
TrackBuffer::BreakCycles()
{
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->BreakCycles();
}
mDecoders.Clear();
}
void
TrackBuffer::ResetDecode()
{
for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
mDecoders[i]->GetReader()->ResetDecode();
}
}
const nsTArray<nsRefPtr<SourceBufferDecoder>>&
TrackBuffer::Decoders()
{
// XXX assert OnDecodeThread
return mDecoders;
}
} // namespace mozilla
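To make the init-segment handshake above concrete, here is a small standalone sketch (hypothetical stand-in types, not the Gecko classes): the reader keeps reporting that it is waiting for media resources until every registered TrackBuffer has seen an init segment, which a TrackBuffer only reports once its first decoder has initialized and OnTrackBufferConfigured has run.

#include <vector>

struct FakeTrackBuffer {
  bool mHasAudio = false;
  bool mHasVideo = false;
  bool HasInitSegment() const { return mHasAudio || mHasVideo; }
};

// Mirrors MediaSourceReader::IsWaitingMediaResources(): true while any track
// buffer lacks an init segment, or while no track buffers exist at all.
bool IsWaitingMediaResources(const std::vector<FakeTrackBuffer>& aTrackBuffers) {
  for (const FakeTrackBuffer& tb : aTrackBuffers) {
    if (!tb.HasInitSegment()) {
      return true;
    }
  }
  return aTrackBuffers.empty();
}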

View File: TrackBuffer.h (new file)

@ -0,0 +1,129 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_TRACKBUFFER_H_
#define MOZILLA_TRACKBUFFER_H_
#include "SourceBufferDecoder.h"
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/mozalloc.h"
#include "nsCOMPtr.h"
#include "nsString.h"
#include "nscore.h"
namespace mozilla {
class MediaSourceDecoder;
namespace dom {
class TimeRanges;
} // namespace dom
class TrackBuffer MOZ_FINAL {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffer);
TrackBuffer(MediaSourceDecoder* aParentDecoder, const nsACString& aType);
void Shutdown();
// Append data to the current decoder. Also responsible for calling
// NotifyDataArrived on the decoder to keep buffered range computation up
// to date.
void AppendData(const uint8_t* aData, uint32_t aLength);
bool EvictData(uint32_t aThreshold);
void EvictBefore(double aTime);
// Returns the highest end time of all of the buffered ranges in the
// decoders managed by this TrackBuffer, and returns the union of the
// decoders buffered ranges in aRanges.
double Buffered(dom::TimeRanges* aRanges);
// Create a new decoder, set mCurrentDecoder to the new decoder, and queue
// the decoder for initialization. The decoder is not considered
// initialized until it is added to mDecoders.
bool NewDecoder();
// Mark the current decoder's resource as ended, clear mCurrentDecoder and
// reset mLast{Start,End}Timestamp.
void DiscardDecoder();
void Detach();
// Returns true if an init segment has been appended *and* the decoder
// using that init segment has successfully initialized.
bool HasInitSegment();
// Query and update mLast{Start,End}Timestamp.
void LastTimestamp(double& aStart, double& aEnd);
void SetLastStartTimestamp(double aStart);
void SetLastEndTimestamp(double aEnd);
// Returns true if any of the decoders managed by this track buffer
// contain aTime in their buffered ranges.
bool ContainsTime(double aTime);
// Returns true if this TrackBuffer has an audio or video track,
// respectively.
bool HasAudio();
bool HasVideo();
void BreakCycles();
// Call ResetDecode() on each decoder in mDecoders.
void ResetDecode();
// Returns a reference to mDecoders, used by MediaSourceReader to select
// decoders.
// TODO: Refactor to a cleaner interface between TrackBuffer and MediaSourceReader.
const nsTArray<nsRefPtr<SourceBufferDecoder>>& Decoders();
private:
~TrackBuffer();
// Queue execution of InitializeDecoder on mTaskQueue.
bool QueueInitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
// Runs decoder initialization including calling ReadMetadata. Runs as an
// event on the decode thread pool.
void InitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
// Adds a successfully initialized decoder to mDecoders and (if it's the
// first decoder initialized), initializes mHasAudio/mHasVideo. Called
// from the decode thread pool.
void RegisterDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
// A task queue using the shared media thread pool. Used exclusively to
// initialize (i.e. call ReadMetadata on) decoders as they are created via
// NewDecoder.
RefPtr<MediaTaskQueue> mTaskQueue;
// All of the initialized decoders managed by this TrackBuffer. Access
// protected by mParentDecoder's monitor.
nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
// The decoder that the owning SourceBuffer is currently appending data to.
nsRefPtr<SourceBufferDecoder> mCurrentDecoder;
nsRefPtr<MediaSourceDecoder> mParentDecoder;
const nsCString mType;
// The last start and end timestamps added to the TrackBuffer via
// AppendData. Accessed on the main thread only.
double mLastStartTimestamp;
double mLastEndTimestamp;
// Set when the first decoder used by this TrackBuffer is initialized.
// Protected by mParentDecoder's monitor.
bool mHasAudio;
bool mHasVideo;
};
} // namespace mozilla
#endif /* MOZILLA_TRACKBUFFER_H_ */

View File: moz.build

@ -25,6 +25,7 @@ UNIFIED_SOURCES += [
'SourceBufferDecoder.cpp',
'SourceBufferList.cpp',
'SourceBufferResource.cpp',
'TrackBuffer.cpp',
]
FAIL_ON_WARNINGS = True
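
Finally, for context on how SwitchAudioReader and SwitchVideoReader in MediaSourceReader.cpp now pick a decoder within a track, a hedged standalone sketch follows. The types below are simplified stand-ins, not the real MediaDecoderReader/TimeRanges API; the real code walks the TrackBuffer's decoder list, skips decoders whose buffered ranges do not contain the target time or whose sample queues are at end of stream (and, for audio, decoders whose format differs from the current reader's), marks the old reader idle, and switches to the new one.

#include <vector>

// Simplified stand-in for a subdecoder's reader and its buffered range.
struct FakeReader {
  double mBufferedStart = 0.0;
  double mBufferedEnd = 0.0;
  bool mAtEndOfStream = false;
  bool ContainsTime(double aTime) const {
    return aTime >= mBufferedStart && aTime < mBufferedEnd;
  }
};

// Sketch of the selection loop: pick the first decoder in the track whose
// buffered range covers the target and which still has data to decode.
// Returns nullptr if no switch is possible (the real code returns false).
FakeReader* SelectReader(std::vector<FakeReader>& aDecoders, double aTarget) {
  for (FakeReader& reader : aDecoders) {
    if (!reader.ContainsTime(aTarget) || reader.mAtEndOfStream) {
      continue;
    }
    return &reader;  // the real code marks the previous reader idle first
  }
  return nullptr;
}

int main() {
  std::vector<FakeReader> decoders = {{0.0, 5.0, false}, {5.0, 10.0, false}};
  FakeReader* r = SelectReader(decoders, 7.5);  // selects the second decoder
  return r ? 0 : 1;
}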