Bug 1163445: Part5. Replace dom::TimeRanges with TimeIntervals object. r=mattwoodrow

This commit is contained in:
Jean-Yves Avenard 2015-05-18 16:15:47 +10:00
parent f85b6e4435
commit cb58d9f184
47 changed files with 351 additions and 444 deletions

View File

@ -1484,10 +1484,12 @@ HTMLMediaElement::Seek(double aTime,
// Clamp the seek target to inside the seekable ranges.
nsRefPtr<dom::TimeRanges> seekable = new dom::TimeRanges();
if (NS_FAILED(mDecoder->GetSeekable(seekable))) {
media::TimeIntervals seekableIntervals = mDecoder->GetSeekable();
if (seekableIntervals.IsInvalid()) {
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return;
}
seekableIntervals.ToTimeRanges(seekable);
uint32_t length = 0;
seekable->GetLength(&length);
if (!length) {
@ -1601,9 +1603,8 @@ HTMLMediaElement::Seekable() const
{
nsRefPtr<TimeRanges> ranges = new TimeRanges();
if (mDecoder && mReadyState > nsIDOMHTMLMediaElement::HAVE_NOTHING) {
mDecoder->GetSeekable(ranges);
mDecoder->GetSeekable().ToTimeRanges(ranges);
}
ranges->Normalize();
return ranges.forget();
}
@ -4169,12 +4170,12 @@ HTMLMediaElement::Buffered() const
nsRefPtr<TimeRanges> ranges = new TimeRanges();
if (mReadyState > nsIDOMHTMLMediaElement::HAVE_NOTHING) {
if (mDecoder) {
// If GetBuffered fails we ignore the error result and just return the
// time ranges we found up till the error.
mDecoder->GetBuffered(ranges);
media::TimeIntervals buffered = mDecoder->GetBuffered();
if (!buffered.IsInvalid()) {
buffered.ToTimeRanges(ranges);
}
}
}
ranges->Normalize();
return ranges.forget();
}

View File

@ -513,8 +513,8 @@ public:
void Shift(const T& aOffset)
{
for (auto& interval : mIntervals) {
interval.mStart += aOffset;
interval.mEnd += aOffset;
interval.mStart = interval.mStart + aOffset;
interval.mEnd = interval.mEnd + aOffset;
}
}
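The switch from compound assignment to an explicit add-and-assign suggests that the interval's bound type (media::TimeUnit in this series) defines a binary operator+ but no operator+=; that is an inference, not stated in the patch. A minimal sketch with a made-up Bound type:

struct Bound {
  int64_t mValue;
  Bound operator+(const Bound& aOther) const { return Bound{mValue + aOther.mValue}; }
};

void ShiftBound(Bound& aStart, const Bound& aOffset)
{
  // aStart += aOffset;        // would not compile: no operator+= is defined
  aStart = aStart + aOffset;   // the form Shift() uses above
}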

View File

@ -12,7 +12,6 @@
#include "nsTArray.h"
#include "VideoUtils.h"
#include "MediaDecoderStateMachine.h"
#include "mozilla/dom/TimeRanges.h"
#include "ImageContainer.h"
#include "MediaResource.h"
#include "nsError.h"
@ -1292,22 +1291,21 @@ bool MediaDecoder::IsMediaSeekable()
return mMediaSeekable;
}
nsresult MediaDecoder::GetSeekable(dom::TimeRanges* aSeekable)
media::TimeIntervals MediaDecoder::GetSeekable()
{
double initialTime = 0.0;
// We can seek in buffered range if the media is seekable. Also, we can seek
// in unbuffered ranges if the transport level is seekable (local file or the
// server supports range requests, etc.)
if (!IsMediaSeekable()) {
return NS_OK;
return media::TimeIntervals();
} else if (!IsTransportSeekable()) {
return GetBuffered(aSeekable);
return GetBuffered();
} else {
double end = IsInfinite() ? std::numeric_limits<double>::infinity()
: initialTime + GetDuration();
aSeekable->Add(initialTime, end);
return NS_OK;
return media::TimeIntervals(
media::TimeInterval(media::TimeUnit::FromMicroseconds(0),
IsInfinite() ?
media::TimeUnit::FromInfinity() :
media::TimeUnit::FromSeconds(GetDuration())));
}
}
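Restating the mapping implemented above, with hypothetical values:

//   IsMediaSeekable() == false                          -> {}        (empty set)
//   IsTransportSeekable() == false, buffered = [2s, 8s) -> [2s, 8s)  (GetBuffered())
//   both seekable, GetDuration() = 30s                  -> [0s, 30s]
//   both seekable, IsInfinite() == true                 -> [0s, +inf)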
@ -1453,9 +1451,9 @@ void MediaDecoder::Invalidate()
// Constructs the time ranges representing what segments of the media
// are buffered and playable.
nsresult MediaDecoder::GetBuffered(dom::TimeRanges* aBuffered) {
NS_ENSURE_TRUE(mDecoderStateMachine && !mShuttingDown, NS_ERROR_FAILURE);
return mDecoderStateMachine->GetBuffered(aBuffered);
media::TimeIntervals MediaDecoder::GetBuffered() {
NS_ENSURE_TRUE(mDecoderStateMachine && !mShuttingDown, media::TimeIntervals::Invalid());
return mDecoderStateMachine->GetBuffered();
}
size_t MediaDecoder::SizeOfVideoQueue() {

View File

@ -203,16 +203,11 @@ destroying the MediaDecoder object.
#ifdef MOZ_EME
#include "mozilla/CDMProxy.h"
#endif
#include "TimeUnits.h"
class nsIStreamListener;
class nsIPrincipal;
namespace mozilla {
namespace dom {
class TimeRanges;
}
}
namespace mozilla {
class VideoFrameContainer;
@ -526,7 +521,7 @@ public:
virtual bool IsTransportSeekable() override;
// Return the time ranges that can be seeked into.
virtual nsresult GetSeekable(dom::TimeRanges* aSeekable);
virtual media::TimeIntervals GetSeekable();
// Set the end time of the media resource. When playback reaches
// this point the media pauses. aTime is in seconds.
@ -584,7 +579,7 @@ public:
// Constructs the time ranges representing what segments of the media
// are buffered and playable.
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
virtual media::TimeIntervals GetBuffered();
// Returns the size, in bytes, of the heap memory used by the currently
// queued decoded video and audio data.

View File

@ -147,8 +147,8 @@ MediaDecoderReader::SetStartTime(int64_t aStartTime)
mStartTime = aStartTime;
}
nsresult
MediaDecoderReader::GetBuffered(mozilla::dom::TimeRanges* aBuffered)
media::TimeIntervals
MediaDecoderReader::GetBuffered()
{
AutoPinned<MediaResource> stream(mDecoder->GetResource());
int64_t durationUs = 0;
@ -156,8 +156,7 @@ MediaDecoderReader::GetBuffered(mozilla::dom::TimeRanges* aBuffered)
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
durationUs = mDecoder->GetMediaDuration();
}
GetEstimatedBufferedTimeRanges(stream, durationUs, aBuffered);
return NS_OK;
return GetEstimatedBufferedTimeRanges(stream, durationUs);
}
int64_t

View File

@ -12,13 +12,10 @@
#include "MediaPromise.h"
#include "MediaQueue.h"
#include "AudioCompactor.h"
#include "TimeUnits.h"
namespace mozilla {
namespace dom {
class TimeRanges;
}
class MediaDecoderReader;
class SharedDecoderManager;
@ -226,7 +223,7 @@ public:
// The OggReader relies on this base implementation not performing I/O,
// since in FirefoxOS we can't do I/O on the main thread, where this is
// called.
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
virtual media::TimeIntervals GetBuffered();
virtual int64_t ComputeStartTime(const VideoData* aVideo, const AudioData* aAudio);

View File

@ -21,7 +21,7 @@
#include "mozilla/MathAlgorithms.h"
#include "mozilla/mozalloc.h"
#include "VideoUtils.h"
#include "mozilla/dom/TimeRanges.h"
#include "TimeUnits.h"
#include "nsDeque.h"
#include "AudioSegment.h"
#include "VideoSegment.h"
@ -1713,17 +1713,13 @@ void MediaDecoderStateMachine::NotifyDataArrived(const char* aBuffer,
//
// Make sure to only do this if we have a start time, otherwise the reader
// doesn't know how to compute GetBuffered.
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
if (mDecoder->IsInfinite() && (mStartTime != -1) &&
NS_SUCCEEDED(mDecoder->GetBuffered(buffered)))
{
uint32_t length = 0;
buffered->GetLength(&length);
if (length) {
double end = 0;
buffered->End(length - 1, &end);
media::TimeIntervals buffered{mDecoder->GetBuffered()};
if (mDecoder->IsInfinite() && (mStartTime != -1) && !buffered.IsInvalid()) {
bool exists;
media::TimeUnit end{buffered.GetEnd(&exists)};
if (exists) {
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
mEndTime = std::max<int64_t>(mEndTime, end * USECS_PER_S);
mEndTime = std::max<int64_t>(mEndTime, end.ToMicroseconds());
}
}
}
@ -2149,9 +2145,10 @@ bool MediaDecoderStateMachine::HasLowUndecodedData(int64_t aUsecs)
return false;
}
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
nsresult rv = mReader->GetBuffered(buffered.get());
NS_ENSURE_SUCCESS(rv, false);
media::TimeIntervals buffered{mReader->GetBuffered()};
if (buffered.IsInvalid()) {
return false;
}
int64_t endOfDecodedVideoData = INT64_MAX;
if (HasVideo() && !VideoQueue().AtEndOfStream()) {
@ -2165,10 +2162,13 @@ bool MediaDecoderStateMachine::HasLowUndecodedData(int64_t aUsecs)
endOfDecodedAudioData = mDecodedAudioEndTime;
}
int64_t endOfDecodedData = std::min(endOfDecodedVideoData, endOfDecodedAudioData);
return endOfDecodedData != INT64_MAX &&
!buffered->Contains(static_cast<double>(endOfDecodedData) / USECS_PER_S,
static_cast<double>(std::min(endOfDecodedData + aUsecs, GetDuration())) / USECS_PER_S);
if (GetDuration() < endOfDecodedData) {
// Our duration is not up to date. No point buffering.
return false;
}
media::TimeInterval interval(media::TimeUnit::FromMicroseconds(endOfDecodedData),
media::TimeUnit::FromMicroseconds(std::min(endOfDecodedData + aUsecs, GetDuration())));
return endOfDecodedData != INT64_MAX && !buffered.Contains(interval);
}
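A worked illustration of the new containment check, with made-up numbers and assuming Contains() requires the whole interval to lie inside a single buffered interval:

// duration = 30s, buffered = [0s, 10s)
// endOfDecodedData = 8s, aUsecs = 3s  ->  interval = [8s, 11s)
// buffered.Contains(interval) is false (11s is past the buffered end),
// so the function reports low undecoded data.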
void

View File

@ -277,16 +277,16 @@ public:
return mState == DECODER_STATE_SEEKING;
}
nsresult GetBuffered(dom::TimeRanges* aBuffered) {
media::TimeIntervals GetBuffered() {
// It's possible for JS to query .buffered before we've determined the start
// time from metadata, in which case the reader isn't ready to be asked this
// question.
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
if (mStartTime < 0) {
return NS_OK;
return media::TimeIntervals();
}
return mReader->GetBuffered(aBuffered);
return mReader->GetBuffered();
}
size_t SizeOfVideoQueue() {

View File

@ -5,7 +5,6 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/Preferences.h"
#include "nsPrintfCString.h"
#include "nsSize.h"
@ -1245,8 +1244,8 @@ MediaFormatReader::GetEvictionOffset(double aTime)
return std::min(audioOffset, videoOffset);
}
nsresult
MediaFormatReader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals
MediaFormatReader::GetBuffered()
{
media::TimeIntervals videoti;
media::TimeIntervals audioti;
@ -1270,14 +1269,14 @@ MediaFormatReader::GetBuffered(dom::TimeRanges* aBuffered)
}
if (HasAudio() && HasVideo()) {
videoti.Intersection(audioti);
videoti.ToTimeRanges(aBuffered);
return videoti;
} else if (HasAudio()) {
audioti.ToTimeRanges(aBuffered);
return audioti;
} else if (HasVideo()) {
videoti.ToTimeRanges(aBuffered);
return videoti;
}
return NS_OK;
return media::TimeIntervals();
}
bool MediaFormatReader::IsDormantNeeded()

View File

@ -17,10 +17,6 @@
namespace mozilla {
namespace dom {
class TimeRanges;
}
#if defined(MOZ_GONK_MEDIACODEC) || defined(XP_WIN) || defined(MOZ_APPLEMEDIA) || defined(MOZ_FFMPEG)
#define READER_DORMANT_HEURISTIC
#else
@ -81,7 +77,7 @@ public:
uint32_t aLength,
int64_t aOffset) override;
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
virtual media::TimeIntervals GetBuffered() override;
// For Media Resource Management
virtual void SetIdle() override;

View File

@ -8,7 +8,6 @@
#define TIME_UNITS_H
#include "Intervals.h"
#include "VideoUtils.h"
#include "mozilla/CheckedInt.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/dom/TimeRanges.h"
@ -16,6 +15,15 @@
namespace mozilla {
namespace media {
// Number of microseconds per second. 1e6.
static const int64_t USECS_PER_S = 1000000;
// Number of microseconds per millisecond.
static const int64_t USECS_PER_MS = 1000;
// Number of nanoseconds per second. 1e9.
static const int64_t NSECS_PER_S = 1000000000;
struct Microseconds {
Microseconds()
: mValue(0)
@ -89,6 +97,10 @@ public:
return TimeUnit(aValue.mValue);
}
static TimeUnit FromNanoseconds(int64_t aValue) {
return TimeUnit(aValue / 1000);
}
static TimeUnit FromInfinity() {
return TimeUnit(INT64_MAX);
}
@ -97,6 +109,10 @@ public:
return mValue.value();
}
int64_t ToNanoseconds() const {
return mValue.value() * 1000;
}
double ToSeconds() const {
if (IsInfinite()) {
return PositiveInfinity<double>();
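A small precision note on the new nanosecond helpers above (hypothetical values): FromNanoseconds() divides by 1000 with integer truncation, so TimeUnit::FromNanoseconds(1500).ToMicroseconds() is 1, and ToNanoseconds() then yields 1000, i.e. sub-microsecond remainders are dropped.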
@ -198,6 +214,17 @@ public:
: BaseType(Move(aOther))
{}
static TimeIntervals Invalid()
{
return TimeIntervals(TimeInterval(TimeUnit::FromMicroseconds(INT64_MIN),
TimeUnit::FromMicroseconds(INT64_MIN)));
}
bool IsInvalid()
{
return Length() == 1 && Start(0).ToMicroseconds() == INT64_MIN &&
End(0).ToMicroseconds() == INT64_MIN;
}
TimeIntervals() = default;
// Make TimeIntervals interchangeable with dom::TimeRanges.
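The Invalid()/IsInvalid() pair replaces the old nsresult failure path: an error is now carried in-band as a single sentinel interval at INT64_MIN microseconds. A minimal caller sketch assuming the classes from this patch (the helper name is made up), mirroring the pattern used throughout the series:

media::TimeIntervals
ForwardBufferedOrError(MediaDecoderReader* aReader)
{
  media::TimeIntervals buffered = aReader->GetBuffered();
  if (buffered.IsInvalid()) {
    // Propagate the error marker instead of returning NS_ERROR_FAILURE.
    return media::TimeIntervals::Invalid();
  }
  return buffered; // possibly empty, but valid
}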

View File

@ -4,7 +4,7 @@
#include "VideoUtils.h"
#include "MediaResource.h"
#include "mozilla/dom/TimeRanges.h"
#include "TimeUnits.h"
#include "nsMathUtils.h"
#include "nsSize.h"
#include "VorbisUtils.h"
@ -70,18 +70,20 @@ static int64_t BytesToTime(int64_t offset, int64_t length, int64_t durationUs) {
return int64_t(double(durationUs) * r);
}
void GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
int64_t aDurationUsecs,
mozilla::dom::TimeRanges* aOutBuffered)
media::TimeIntervals GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
int64_t aDurationUsecs)
{
media::TimeIntervals buffered;
// Nothing to cache if the media takes 0us to play.
if (aDurationUsecs <= 0 || !aStream || !aOutBuffered)
return;
if (aDurationUsecs <= 0 || !aStream)
return buffered;
// Special case completely cached files. This also handles local files.
if (aStream->IsDataCachedToEndOfResource(0)) {
aOutBuffered->Add(0, double(aDurationUsecs) / USECS_PER_S);
return;
buffered +=
media::TimeInterval(media::TimeUnit::FromMicroseconds(0),
media::TimeUnit::FromMicroseconds(aDurationUsecs));
return buffered;
}
int64_t totalBytes = aStream->GetLength();
@ -90,7 +92,7 @@ void GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
// buffered. This will put us in a state of eternally-low-on-undecoded-data
// which is not great, but about the best we can do.
if (totalBytes <= 0)
return;
return buffered;
int64_t startOffset = aStream->GetNextCachedData(0);
while (startOffset >= 0) {
@ -102,12 +104,14 @@ void GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
int64_t startUs = BytesToTime(startOffset, totalBytes, aDurationUsecs);
int64_t endUs = BytesToTime(endOffset, totalBytes, aDurationUsecs);
if (startUs != endUs) {
aOutBuffered->Add(double(startUs) / USECS_PER_S,
double(endUs) / USECS_PER_S);
buffered +=
media::TimeInterval(media::TimeUnit::FromMicroseconds(startUs),
media::TimeUnit::FromMicroseconds(endUs));
}
startOffset = aStream->GetNextCachedData(endOffset);
}
return;
return buffered;
}
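To make the estimate concrete, with numbers chosen for illustration and assuming BytesToTime() maps offset/totalBytes linearly onto the duration, as the (byteOffset/length)*duration comment in VideoUtils.h describes:

// totalBytes = 1,000,000 bytes, aDurationUsecs = 20,000,000 (20s)
// one cached byte range [250,000, 500,000) maps to
//   startUs = 20,000,000 * 250,000 / 1,000,000 =  5,000,000
//   endUs   = 20,000,000 * 500,000 / 1,000,000 = 10,000,000
// yielding the single TimeInterval [5s, 10s).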
int DownmixAudioToStereo(mozilla::AudioDataValue* buffer,

View File

@ -23,6 +23,7 @@
#include "prtime.h"
#include "AudioSampleFormat.h"
#include "mozilla/RefPtr.h"
#include "TimeUnits.h"
using mozilla::CheckedInt64;
using mozilla::CheckedUint64;
@ -115,19 +116,14 @@ void DeleteOnMainThread(nsAutoPtr<T>& aObject) {
class MediaResource;
namespace dom {
class TimeRanges;
}
// Estimates the buffered ranges of a MediaResource using a simple
// (byteOffset/length)*duration method. Probably inaccurate, but won't
// do file I/O, and can be used when we don't have detailed knowledge
// of the byte->time mapping of a resource. aDurationUsecs is the duration
// of the media in microseconds. The estimated buffered ranges are returned
// as media::TimeIntervals. Ranges are 0-normalized, i.e. in the range of (0, duration].
void GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
int64_t aDurationUsecs,
mozilla::dom::TimeRanges* aOutBuffered);
media::TimeIntervals GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
int64_t aDurationUsecs);
// Converts from number of audio frames (aFrames) to microseconds, given
// the specified audio rate (aRate). Stores result in aOutUsecs. Returns true

View File

@ -4,7 +4,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/Preferences.h"
#include "mozilla/dom/TimeRanges.h"
#include "MediaResource.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "AndroidMediaPluginHost.h"

View File

@ -5,7 +5,6 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "AndroidMediaReader.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/gfx/Point.h"
#include "MediaResource.h"
#include "VideoUtils.h"

View File

@ -18,7 +18,6 @@
#include "SharedThreadPool.h"
#include "mozilla/Preferences.h"
#include "mozilla/Telemetry.h"
#include "mozilla/dom/TimeRanges.h"
#include "mp4_demuxer/AnnexB.h"
#include "mp4_demuxer/H264.h"
#include "SharedDecoderManager.h"
@ -1073,12 +1072,13 @@ MP4Reader::GetEvictionOffset(double aTime)
return mDemuxer->GetEvictionOffset(aTime * 1000000.0);
}
nsresult
MP4Reader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals
MP4Reader::GetBuffered()
{
MonitorAutoLock mon(mDemuxerMonitor);
media::TimeIntervals buffered;
if (!mIndexReady) {
return NS_OK;
return buffered;
}
UpdateIndex();
MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
@ -1091,12 +1091,13 @@ MP4Reader::GetBuffered(dom::TimeRanges* aBuffered)
nsTArray<Interval<Microseconds>> timeRanges;
mDemuxer->ConvertByteRangesToTime(ranges, &timeRanges);
for (size_t i = 0; i < timeRanges.Length(); i++) {
aBuffered->Add((timeRanges[i].start - mStartTime) / 1000000.0,
(timeRanges[i].end - mStartTime) / 1000000.0);
buffered += media::TimeInterval(
media::TimeUnit::FromMicroseconds(timeRanges[i].start - mStartTime),
media::TimeUnit::FromMicroseconds(timeRanges[i].end - mStartTime));
}
}
return NS_OK;
return buffered;
}
bool MP4Reader::IsDormantNeeded()

View File

@ -19,10 +19,6 @@
namespace mozilla {
namespace dom {
class TimeRanges;
}
typedef std::deque<nsRefPtr<MediaRawData>> MediaSampleQueue;
class MP4Stream;
@ -68,7 +64,7 @@ public:
virtual int64_t GetEvictionOffset(double aTime) override;
virtual void NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset) override;
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
virtual media::TimeIntervals GetBuffered() override;
// For Media Resource Management
virtual void SetIdle() override;

View File

@ -6,7 +6,6 @@
#include "GStreamerMozVideoBuffer.h"
#include "GStreamerFormatHelper.h"
#include "VideoUtils.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/Endian.h"
#include "mozilla/Preferences.h"

View File

@ -15,7 +15,6 @@
#endif
#include "GStreamerFormatHelper.h"
#include "VideoUtils.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/Endian.h"
#include "mozilla/Preferences.h"
#include "mozilla/unused.h"
@ -868,10 +867,11 @@ GStreamerReader::Seek(int64_t aTarget, int64_t aEndTime)
return SeekPromise::CreateAndResolve(aTarget, __func__);
}
nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals GStreamerReader::GetBuffered()
{
media::TimeIntervals buffered;
if (!mInfo.HasValidMedia()) {
return NS_OK;
return buffered;
}
#if GST_VERSION_MAJOR == 0
@ -890,11 +890,12 @@ nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered)
duration = mDecoder->GetMediaDuration();
}
double end = (double) duration / GST_MSECOND;
LOG(PR_LOG_DEBUG, "complete range [0, %f] for [0, %li]",
end, GetDataLength());
aBuffered->Add(0, end);
return NS_OK;
(double) duration / GST_MSECOND, GetDataLength());
buffered +=
media::TimeInterval(media::TimeUnit::FromMicroseconds(0),
media::TimeUnit::FromMicroseconds(duration));
return buffered;
}
for(uint32_t index = 0; index < ranges.Length(); index++) {
@ -918,14 +919,16 @@ nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered)
continue;
#endif
double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND;
double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND;
LOG(PR_LOG_DEBUG, "adding range [%f, %f] for [%li %li] size %li",
start, end, startOffset, endOffset, GetDataLength());
aBuffered->Add(start, end);
(double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND,
(double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND,
startOffset, endOffset, GetDataLength());
buffered +=
media::TimeInterval(media::TimeUnit::FromMicroseconds(GST_TIME_AS_USECONDS(startTime)),
media::TimeUnit::FromMicroseconds(GST_TIME_AS_USECONDS(endTime)));
}
return NS_OK;
return buffered;
}
void GStreamerReader::ReadAndPushData(guint aLength)

View File

@ -29,10 +29,6 @@ struct GstURIDecodeBin;
namespace mozilla {
namespace dom {
class TimeRanges;
}
class AbstractMediaDecoder;
class GStreamerReader : public MediaDecoderReader
@ -53,7 +49,7 @@ public:
MetadataTags** aTags) override;
virtual nsRefPtr<SeekPromise>
Seek(int64_t aTime, int64_t aEndTime) override;
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
virtual media::TimeIntervals GetBuffered() override;
virtual void NotifyDataArrived(const char *aBuffer,
uint32_t aLength,

View File

@ -10,7 +10,7 @@
#include "MockMediaResource.h"
#include "MockMediaDecoderOwner.h"
#include "mozilla/Preferences.h"
#include "mozilla/dom/TimeRanges.h"
#include "TimeUnits.h"
using namespace mozilla;
using namespace mozilla::dom;
@ -87,15 +87,10 @@ TEST(MP4Reader, BufferedRange)
// Video 3-4 sec, audio 2.986666-4.010666 sec
b->resource->MockAddBufferedRange(248400, 327455);
nsRefPtr<TimeRanges> ranges = new TimeRanges();
EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
EXPECT_EQ(1U, ranges->Length());
double start = 0;
EXPECT_EQ(NS_OK, ranges->Start(0, &start));
EXPECT_NEAR(270000 / 90000.0, start, 0.000001);
double end = 0;
EXPECT_EQ(NS_OK, ranges->End(0, &end));
EXPECT_NEAR(360000 / 90000.0, end, 0.000001);
media::TimeIntervals ranges = b->reader->GetBuffered();
EXPECT_EQ(1U, ranges.Length());
EXPECT_NEAR(270000 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
EXPECT_NEAR(360000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
}
TEST(MP4Reader, BufferedRangeMissingLastByte)
@ -108,15 +103,10 @@ TEST(MP4Reader, BufferedRangeMissingLastByte)
b->resource->MockAddBufferedRange(248400, 324912);
b->resource->MockAddBufferedRange(324913, 327455);
nsRefPtr<TimeRanges> ranges = new TimeRanges();
EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
EXPECT_EQ(1U, ranges->Length());
double start = 0;
EXPECT_EQ(NS_OK, ranges->Start(0, &start));
EXPECT_NEAR(270000.0 / 90000.0, start, 0.000001);
double end = 0;
EXPECT_EQ(NS_OK, ranges->End(0, &end));
EXPECT_NEAR(357000 / 90000.0, end, 0.000001);
media::TimeIntervals ranges = b->reader->GetBuffered();
EXPECT_EQ(1U, ranges.Length());
EXPECT_NEAR(270000.0 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
EXPECT_NEAR(357000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
}
TEST(MP4Reader, BufferedRangeSyncFrame)
@ -129,15 +119,10 @@ TEST(MP4Reader, BufferedRangeSyncFrame)
b->resource->MockClearBufferedRanges();
b->resource->MockAddBufferedRange(146336, 327455);
nsRefPtr<TimeRanges> ranges = new TimeRanges();
EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
EXPECT_EQ(1U, ranges->Length());
double start = 0;
EXPECT_EQ(NS_OK, ranges->Start(0, &start));
EXPECT_NEAR(270000.0 / 90000.0, start, 0.000001);
double end = 0;
EXPECT_EQ(NS_OK, ranges->End(0, &end));
EXPECT_NEAR(360000 / 90000.0, end, 0.000001);
media::TimeIntervals ranges = b->reader->GetBuffered();
EXPECT_EQ(1U, ranges.Length());
EXPECT_NEAR(270000.0 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
EXPECT_NEAR(360000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
}
TEST(MP4Reader, CompositionOrder)
@ -187,23 +172,14 @@ TEST(MP4Reader, CompositionOrder)
b->resource->MockAddBufferedRange(12616, 13196);
b->resource->MockAddBufferedRange(13220, 13901);
nsRefPtr<TimeRanges> ranges = new TimeRanges();
EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
EXPECT_EQ(2U, ranges->Length());
media::TimeIntervals ranges = b->reader->GetBuffered();
EXPECT_EQ(2U, ranges.Length());
double start = 0;
EXPECT_EQ(NS_OK, ranges->Start(0, &start));
EXPECT_NEAR(166.0 / 2500.0, start, 0.000001);
double end = 0;
EXPECT_EQ(NS_OK, ranges->End(0, &end));
EXPECT_NEAR(332.0 / 2500.0, end, 0.000001);
EXPECT_NEAR(166.0 / 2500.0, ranges.Start(0).ToSeconds(), 0.000001);
EXPECT_NEAR(332.0 / 2500.0, ranges.End(0).ToSeconds(), 0.000001);
start = 0;
EXPECT_EQ(NS_OK, ranges->Start(1, &start));
EXPECT_NEAR(581.0 / 2500.0, start, 0.000001);
end = 0;
EXPECT_EQ(NS_OK, ranges->End(1, &end));
EXPECT_NEAR(11255.0 / 44100.0, end, 0.000001);
EXPECT_NEAR(581.0 / 2500.0, ranges.Start(1).ToSeconds(), 0.000001);
EXPECT_NEAR(11255.0 / 44100.0, ranges.End(1).ToSeconds(), 0.000001);
}
TEST(MP4Reader, Normalised)
@ -237,14 +213,9 @@ TEST(MP4Reader, Normalised)
b->resource->MockClearBufferedRanges();
b->resource->MockAddBufferedRange(48, 13901);
nsRefPtr<TimeRanges> ranges = new TimeRanges();
EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
EXPECT_EQ(1U, ranges->Length());
media::TimeIntervals ranges = b->reader->GetBuffered();
EXPECT_EQ(1U, ranges.Length());
double start = 0;
EXPECT_EQ(NS_OK, ranges->Start(0, &start));
EXPECT_NEAR(166.0 / 2500.0, start, 0.000001);
double end = 0;
EXPECT_EQ(NS_OK, ranges->End(0, &end));
EXPECT_NEAR(11255.0 / 44100.0, end, 0.000001);
EXPECT_NEAR(166.0 / 2500.0, ranges.Start(0).ToSeconds(), 0.000001);
EXPECT_NEAR(11255.0 / 44100.0, ranges.End(0).ToSeconds(), 0.000001);
}

View File

@ -16,7 +16,6 @@
#include "mozilla/Preferences.h"
#include "mozilla/dom/BindingDeclarations.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/mozalloc.h"
#include "nsContentTypeParser.h"
#include "nsContentUtils.h"

View File

@ -7,7 +7,6 @@
#include "prlog.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/dom/TimeRanges.h"
#include "MediaDecoderStateMachine.h"
#include "MediaSource.h"
#include "MediaSourceReader.h"
@ -63,26 +62,30 @@ MediaSourceDecoder::Load(nsIStreamListener**, MediaDecoder*)
return ScheduleStateMachine();
}
nsresult
MediaSourceDecoder::GetSeekable(dom::TimeRanges* aSeekable)
media::TimeIntervals
MediaSourceDecoder::GetSeekable()
{
MOZ_ASSERT(NS_IsMainThread());
if (!mMediaSource) {
return NS_ERROR_FAILURE;
NS_WARNING("MediaSource element isn't attached");
return media::TimeIntervals::Invalid();
}
media::TimeIntervals seekable;
double duration = mMediaSource->Duration();
if (IsNaN(duration)) {
// Return empty range.
} else if (duration > 0 && mozilla::IsInfinite(duration)) {
nsRefPtr<dom::TimeRanges> bufferedRanges = new dom::TimeRanges();
mReader->GetBuffered(bufferedRanges);
aSeekable->Add(bufferedRanges->GetStartTime(), bufferedRanges->GetEndTime());
media::TimeIntervals buffered = mReader->GetBuffered();
if (buffered.Length()) {
seekable += media::TimeInterval(buffered.GetStart(), buffered.GetEnd());
}
} else {
aSeekable->Add(0, duration);
seekable += media::TimeInterval(media::TimeUnit::FromSeconds(0),
media::TimeUnit::FromSeconds(duration));
}
MSE_DEBUG("ranges=%s", DumpTimeRanges(aSeekable).get());
return NS_OK;
MSE_DEBUG("ranges=%s", DumpTimeRanges(seekable).get());
return seekable;
}
void
@ -336,22 +339,24 @@ MediaSourceDecoder::SelectDecoder(int64_t aTarget,
{
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
media::TimeUnit target{media::TimeUnit::FromMicroseconds(aTarget)};
media::TimeUnit tolerance{media::TimeUnit::FromMicroseconds(aTolerance + aTarget)};
// aTolerance gives a slight bias toward the start of a range only.
// Consider decoders in order of newest to oldest, as a newer decoder
// providing a given buffered range is expected to replace an older one.
for (int32_t i = aTrackDecoders.Length() - 1; i >= 0; --i) {
nsRefPtr<SourceBufferDecoder> newDecoder = aTrackDecoders[i];
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
newDecoder->GetBuffered(ranges);
if (ranges->Find(double(aTarget) / USECS_PER_S,
double(aTolerance) / USECS_PER_S) == dom::TimeRanges::NoIndex) {
MSE_DEBUGV("SelectDecoder(%lld fuzz:%lld) newDecoder=%p (%d/%d) target not in ranges=%s",
aTarget, aTolerance, newDecoder.get(), i+1,
aTrackDecoders.Length(), DumpTimeRanges(ranges).get());
continue;
media::TimeIntervals ranges = newDecoder->GetBuffered();
for (uint32_t j = 0; j < ranges.Length(); j++) {
if (target < ranges.End(j) && tolerance >= ranges.Start(j)) {
return newDecoder.forget();
}
}
return newDecoder.forget();
MSE_DEBUGV("SelectDecoder(%lld fuzz:%lld) newDecoder=%p (%d/%d) target not in ranges=%s",
aTarget, aTolerance, newDecoder.get(), i+1,
aTrackDecoders.Length(), DumpTimeRanges(ranges).get());
}
return nullptr;
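A worked example of the selection test above, with hypothetical values: for aTarget = 4,900,000us and aTolerance = 200,000us, target is 4.9s and tolerance is 5.1s. A decoder buffered over [5.0s, 9.0s) passes both checks (4.9 < 9.0 and 5.1 >= 5.0) and is selected even though the exact target lies just before the range, which is the slight bias toward the start of a range that the comment describes.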

View File

@ -38,7 +38,7 @@ public:
virtual MediaDecoder* Clone() override;
virtual MediaDecoderStateMachine* CreateStateMachine() override;
virtual nsresult Load(nsIStreamListener**, MediaDecoder*) override;
virtual nsresult GetSeekable(dom::TimeRanges* aSeekable) override;
virtual media::TimeIntervals GetSeekable() override;
virtual void Shutdown() override;

View File

@ -7,7 +7,6 @@
#include <cmath>
#include "prlog.h"
#include "mozilla/dom/TimeRanges.h"
#include "DecoderTraits.h"
#include "MediaDecoderOwner.h"
#include "MediaFormatReader.h"
@ -36,8 +35,6 @@ extern PRLogModuleInfo* GetMediaSourceLog();
// default value used in Blink, kDefaultBufferDurationInMs.
#define EOS_FUZZ_US 125000
using mozilla::dom::TimeRanges;
namespace mozilla {
MediaSourceReader::MediaSourceReader(MediaSourceDecoder* aDecoder)
@ -246,10 +243,9 @@ static void
AdjustEndTime(int64_t* aEndTime, SourceBufferDecoder* aDecoder)
{
if (aDecoder) {
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
aDecoder->GetBuffered(ranges);
if (ranges->Length() > 0) {
int64_t end = std::ceil(ranges->GetEndTime() * USECS_PER_S);
media::TimeIntervals ranges = aDecoder->GetBuffered();
if (ranges.Length()) {
int64_t end = ranges.GetEnd().ToMicroseconds();
*aEndTime = std::max(*aEndTime, end);
}
}
@ -601,9 +597,8 @@ MediaSourceReader::SwitchAudioSource(int64_t* aTarget)
// A decoder buffered range is continuous. We would have failed the exact
// search but succeeded the fuzzy one if our target was shortly before
// start time.
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
newDecoder->GetBuffered(ranges);
int64_t startTime = ranges->GetStartTime() * USECS_PER_S;
media::TimeIntervals ranges = newDecoder->GetBuffered();
int64_t startTime = ranges.GetStart().ToMicroseconds();
if (*aTarget < startTime) {
*aTarget = startTime;
}
@ -647,9 +642,8 @@ MediaSourceReader::SwitchVideoSource(int64_t* aTarget)
// A decoder buffered range is continuous. We would have failed the exact
// search but succeeded the fuzzy one if our target was shortly before
// start time.
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
newDecoder->GetBuffered(ranges);
int64_t startTime = ranges->GetStartTime() * USECS_PER_S;
media::TimeIntervals ranges = newDecoder->GetBuffered();
int64_t startTime = ranges.GetStart().ToMicroseconds();
if (*aTarget < startTime) {
*aTarget = startTime;
}
@ -1003,44 +997,38 @@ MediaSourceReader::DoVideoSeek()
MSE_DEBUG("reader=%p", GetVideoReader());
}
nsresult
MediaSourceReader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals
MediaSourceReader::GetBuffered()
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
MOZ_ASSERT(aBuffered->Length() == 0);
if (mTrackBuffers.IsEmpty()) {
return NS_OK;
media::TimeIntervals buffered;
media::TimeUnit highestEndTime;
nsTArray<media::TimeIntervals> activeRanges;
// Must set the capacity of the nsTArray first: bug #1164444
activeRanges.SetCapacity(mTrackBuffers.Length());
for (const auto& trackBuffer : mTrackBuffers) {
activeRanges.AppendElement(trackBuffer->Buffered());
highestEndTime = std::max(highestEndTime, activeRanges.LastElement().GetEnd());
}
double highestEndTime = 0;
buffered +=
media::TimeInterval(media::TimeUnit::FromMicroseconds(0), highestEndTime);
nsTArray<nsRefPtr<TimeRanges>> activeRanges;
for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
nsRefPtr<TimeRanges> r = new TimeRanges();
mTrackBuffers[i]->Buffered(r);
activeRanges.AppendElement(r);
highestEndTime = std::max(highestEndTime, activeRanges.LastElement()->GetEndTime());
}
TimeRanges* intersectionRanges = aBuffered;
intersectionRanges->Add(0, highestEndTime);
for (uint32_t i = 0; i < activeRanges.Length(); ++i) {
TimeRanges* sourceRanges = activeRanges[i];
if (IsEnded() && sourceRanges->GetEndTime() >= 0) {
for (auto& range : activeRanges) {
if (IsEnded() && range.Length()) {
// Set the end time on the last range to highestEndTime by adding a
// new range spanning the current end time to highestEndTime, which
// the interval union will then merge with the old last range.
sourceRanges->Add(sourceRanges->GetEndTime(), highestEndTime);
sourceRanges->Normalize();
range +=
media::TimeInterval(range.GetEnd(), highestEndTime);
}
intersectionRanges->Intersection(sourceRanges);
buffered.Intersection(range);
}
MSE_DEBUG("ranges=%s", DumpTimeRanges(intersectionRanges).get());
return NS_OK;
MSE_DEBUG("ranges=%s", DumpTimeRanges(buffered).get());
return buffered;
}
already_AddRefed<SourceBufferDecoder>
@ -1056,15 +1044,15 @@ MediaSourceReader::FirstDecoder(MediaData::Type aType)
}
nsRefPtr<SourceBufferDecoder> firstDecoder;
double lowestStartTime = PositiveInfinity<double>();
media::TimeUnit lowestStartTime{media::TimeUnit::FromInfinity()};
for (uint32_t i = 0; i < decoders.Length(); ++i) {
nsRefPtr<TimeRanges> r = new TimeRanges();
decoders[i]->GetBuffered(r);
double start = r->GetStartTime();
if (start < 0) {
media::TimeIntervals r = decoders[i]->GetBuffered();
if (!r.Length()) {
continue;
}
media::TimeUnit start = r.GetStart();
if (start < lowestStartTime) {
firstDecoder = decoders[i];
lowestStartTime = start;
@ -1224,9 +1212,8 @@ MediaSourceReader::IsNearEnd(MediaData::Type aType, int64_t aTime)
}
TrackBuffer* trackBuffer =
aType == MediaData::AUDIO_DATA ? mAudioTrack : mVideoTrack;
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
trackBuffer->Buffered(buffered);
return aTime >= (buffered->GetEndTime() * USECS_PER_S - EOS_FUZZ_US);
media::TimeIntervals buffered = trackBuffer->Buffered();
return aTime >= buffered.GetEnd().ToMicroseconds() - EOS_FUZZ_US;
}
int64_t
@ -1235,10 +1222,9 @@ MediaSourceReader::LastSampleTime(MediaData::Type aType)
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
TrackBuffer* trackBuffer =
aType == MediaData::AUDIO_DATA ? mAudioTrack : mVideoTrack;
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
trackBuffer->Buffered(buffered);
return buffered->GetEndTime() * USECS_PER_S - 1;
aType == MediaData::AUDIO_DATA ? mAudioTrack : mVideoTrack;
media::TimeIntervals buffered = trackBuffer->Buffered();
return buffered.GetEnd().ToMicroseconds() - 1;
}
void
@ -1259,8 +1245,7 @@ MediaSourceReader::GetMozDebugReaderData(nsAString& aString)
for (int32_t i = mAudioTrack->Decoders().Length() - 1; i >= 0; --i) {
nsRefPtr<MediaDecoderReader> newReader = mAudioTrack->Decoders()[i]->GetReader();
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
mAudioTrack->Decoders()[i]->GetBuffered(ranges);
media::TimeIntervals ranges = mAudioTrack->Decoders()[i]->GetBuffered();
result += nsPrintfCString("\t\tReader %d: %p ranges=%s active=%s size=%lld\n",
i, newReader.get(), DumpTimeRanges(ranges).get(),
newReader.get() == GetAudioReader() ? "true" : "false",
@ -1273,8 +1258,7 @@ MediaSourceReader::GetMozDebugReaderData(nsAString& aString)
for (int32_t i = mVideoTrack->Decoders().Length() - 1; i >= 0; --i) {
nsRefPtr<MediaDecoderReader> newReader = mVideoTrack->Decoders()[i]->GetReader();
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
mVideoTrack->Decoders()[i]->GetBuffered(ranges);
media::TimeIntervals ranges = mVideoTrack->Decoders()[i]->GetBuffered();
result += nsPrintfCString("\t\tReader %d: %p ranges=%s active=%s size=%lld\n",
i, newReader.get(), DumpTimeRanges(ranges).get(),
newReader.get() == GetVideoReader() ? "true" : "false",

View File

@ -112,7 +112,7 @@ public:
nsresult ResetDecode() override;
// Acquires the decoder monitor, and is thus callable on any thread.
nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
media::TimeIntervals GetBuffered() override;
already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType,
int64_t aTimestampOffset /* microseconds */);

View File

@ -6,24 +6,24 @@
#include "MediaSourceUtils.h"
#include "prlog.h"
#include "mozilla/dom/TimeRanges.h"
#include "nsPrintfCString.h"
namespace mozilla {
nsCString
DumpTimeRanges(dom::TimeRanges* aRanges)
DumpTimeRanges(const media::TimeIntervals& aRanges)
{
nsCString dump;
dump = "[";
for (uint32_t i = 0; i < aRanges->Length(); ++i) {
for (uint32_t i = 0; i < aRanges.Length(); ++i) {
if (i > 0) {
dump += ", ";
}
ErrorResult dummy;
dump += nsPrintfCString("(%f, %f)", aRanges->Start(i, dummy), aRanges->End(i, dummy));
dump += nsPrintfCString("(%f, %f)",
aRanges.Start(i).ToSeconds(),
aRanges.End(i).ToSeconds());
}
dump += "]";
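For illustration, hypothetical buffered intervals of 0 to 1.5s and 2 to 3s would dump as [(0.000000, 1.500000), (2.000000, 3.000000)], since %f prints six decimal places by default.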

View File

@ -8,14 +8,11 @@
#define MOZILLA_MEDIASOURCEUTILS_H_
#include "nsString.h"
#include "TimeUnits.h"
namespace mozilla {
namespace dom {
class TimeRanges;
} // namespace dom
nsCString DumpTimeRanges(dom::TimeRanges* aRanges);
nsCString DumpTimeRanges(const media::TimeIntervals& aRanges);
} // namespace mozilla

View File

@ -140,18 +140,14 @@ SourceBuffer::GetBuffered(ErrorResult& aRv)
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return nullptr;
}
nsRefPtr<TimeRanges> ranges = new TimeRanges();
double highestEndTime = mTrackBuffer->Buffered(ranges);
if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended &&
highestEndTime > 0) {
// Set the end time on the last range to highestEndTime by adding a
// new range spanning the current end time to highestEndTime, which
// Normalize() will then merge with the old last range.
ranges->Add(ranges->GetEndTime(), highestEndTime);
ranges->Normalize();
}
// We only manage a single trackbuffer in our source buffer.
// As such, there's no need to adjust the end of the trackbuffers as per
// Step 4: http://w3c.github.io/media-source/index.html#widl-SourceBuffer-buffered
media::TimeIntervals ranges = mTrackBuffer->Buffered();
MSE_DEBUGV("ranges=%s", DumpTimeRanges(ranges).get());
return ranges.forget();
nsRefPtr<dom::TimeRanges> tr = new dom::TimeRanges();
ranges.ToTimeRanges(tr);
return tr.forget();
}
void
@ -282,8 +278,8 @@ SourceBuffer::DoRangeRemoval(double aStart, double aEnd)
{
MSE_DEBUG("DoRangeRemoval(%f, %f)", aStart, aEnd);
if (mTrackBuffer && !IsInfinite(aStart)) {
mTrackBuffer->RangeRemoval(media::Microseconds::FromSeconds(aStart),
media::Microseconds::FromSeconds(aEnd));
mTrackBuffer->RangeRemoval(media::TimeUnit::FromSeconds(aStart),
media::TimeUnit::FromSeconds(aEnd));
}
}

View File

@ -9,7 +9,6 @@
#include "prlog.h"
#include "AbstractMediaDecoder.h"
#include "MediaDecoderReader.h"
#include "mozilla/dom/TimeRanges.h"
extern PRLogModuleInfo* GetMediaSourceLog();
/* Polyfill __func__ on MSVC to pass to the log. */
@ -234,24 +233,23 @@ SourceBufferDecoder::NotifyDataArrived(const char* aBuffer, uint32_t aLength, in
mParentDecoder->NotifyDataArrived(nullptr, 0, 0);
}
nsresult
SourceBufferDecoder::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals
SourceBufferDecoder::GetBuffered()
{
nsresult rv = mReader->GetBuffered(aBuffered);
if (NS_FAILED(rv)) {
return rv;
media::TimeIntervals buffered = mReader->GetBuffered();
if (buffered.IsInvalid()) {
return buffered;
}
// Adjust buffered range according to timestamp offset.
aBuffered->Shift((double)mTimestampOffset / USECS_PER_S);
buffered.Shift(media::TimeUnit::FromMicroseconds(mTimestampOffset));
if (!WasTrimmed()) {
return NS_OK;
return buffered;
}
nsRefPtr<dom::TimeRanges> tr = new dom::TimeRanges();
tr->Add(0, mTrimmedOffset);
aBuffered->Intersection(tr);
return NS_OK;
media::TimeInterval filter(media::TimeUnit::FromSeconds(0),
media::TimeUnit::FromSeconds(mTrimmedOffset));
return buffered.Intersection(filter);
}
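A numeric sketch of the adjustment above, with assumed values: if the reader reports [0s, 5s), mTimestampOffset is 2,000,000us and the decoder was trimmed at mTrimmedOffset = 6 (seconds), the shift yields [2s, 7s) and the intersection with the [0s, 6s] filter returns [2s, 6s].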
int64_t

View File

@ -21,12 +21,6 @@ namespace mozilla {
class MediaResource;
class MediaDecoderReader;
namespace dom {
class TimeRanges;
} // namespace dom
class SourceBufferDecoder final : public AbstractMediaDecoder
{
public:
@ -72,7 +66,7 @@ public:
// Warning: this mirrors GetBuffered in MediaDecoder, but this class's base is
// AbstractMediaDecoder, which does not supply this interface.
nsresult GetBuffered(dom::TimeRanges* aBuffered);
media::TimeIntervals GetBuffered();
void SetReader(MediaDecoderReader* aReader)
{

View File

@ -273,24 +273,18 @@ class DecoderSorter
public:
bool LessThan(SourceBufferDecoder* aFirst, SourceBufferDecoder* aSecond) const
{
nsRefPtr<dom::TimeRanges> first = new dom::TimeRanges();
aFirst->GetBuffered(first);
media::TimeIntervals first = aFirst->GetBuffered();
media::TimeIntervals second = aSecond->GetBuffered();
nsRefPtr<dom::TimeRanges> second = new dom::TimeRanges();
aSecond->GetBuffered(second);
return first->GetStartTime() < second->GetStartTime();
return first.GetStart() < second.GetStart();
}
bool Equals(SourceBufferDecoder* aFirst, SourceBufferDecoder* aSecond) const
{
nsRefPtr<dom::TimeRanges> first = new dom::TimeRanges();
aFirst->GetBuffered(first);
media::TimeIntervals first = aFirst->GetBuffered();
media::TimeIntervals second = aSecond->GetBuffered();
nsRefPtr<dom::TimeRanges> second = new dom::TimeRanges();
aSecond->GetBuffered(second);
return first->GetStartTime() == second->GetStartTime();
return first.GetStart() == second.GetStart();
}
};
@ -320,8 +314,7 @@ TrackBuffer::EvictData(double aPlaybackTime,
// First try to evict data before the current play position, starting
// with the oldest decoder.
for (uint32_t i = 0; i < decoders.Length() && toEvict > 0; ++i) {
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
decoders[i]->GetBuffered(buffered);
media::TimeIntervals buffered = decoders[i]->GetBuffered();
MSE_DEBUG("Step1. decoder=%u/%u threshold=%u toEvict=%lld",
i, decoders.Length(), aThreshold, toEvict);
@ -330,22 +323,24 @@ TrackBuffer::EvictData(double aPlaybackTime,
// we apply a threshold of a few seconds back and evict data up to
// that point.
if (aPlaybackTime > MSE_EVICT_THRESHOLD_TIME) {
double time = aPlaybackTime - MSE_EVICT_THRESHOLD_TIME;
media::TimeUnit time = media::TimeUnit::FromSeconds(aPlaybackTime) -
media::TimeUnit::FromSeconds(MSE_EVICT_THRESHOLD_TIME);
bool isActive = decoders[i] == mCurrentDecoder ||
mParentDecoder->IsActiveReader(decoders[i]->GetReader());
if (!isActive && buffered->GetEndTime() < time) {
if (!isActive && buffered.GetEnd() < time) {
// The entire decoder is contained before our current playback time.
// It can be fully evicted.
MSE_DEBUG("evicting all bufferedEnd=%f "
"aPlaybackTime=%f time=%f, size=%lld",
buffered->GetEndTime(), aPlaybackTime, time,
buffered.GetEnd().ToSeconds(), aPlaybackTime, time,
decoders[i]->GetResource()->GetSize());
toEvict -= decoders[i]->GetResource()->EvictAll();
} else {
int64_t playbackOffset = decoders[i]->ConvertToByteOffset(time);
int64_t playbackOffset =
decoders[i]->ConvertToByteOffset(time.ToMicroseconds());
MSE_DEBUG("evicting some bufferedEnd=%f "
"aPlaybackTime=%f time=%f, playbackOffset=%lld size=%lld",
buffered->GetEndTime(), aPlaybackTime, time,
buffered.GetEnd().ToSeconds(), aPlaybackTime, time,
playbackOffset, decoders[i]->GetResource()->GetSize());
if (playbackOffset > 0) {
toEvict -= decoders[i]->GetResource()->EvictData(playbackOffset,
@ -365,13 +360,12 @@ TrackBuffer::EvictData(double aPlaybackTime,
if (decoders[i] == mCurrentDecoder) {
continue;
}
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
decoders[i]->GetBuffered(buffered);
media::TimeIntervals buffered = decoders[i]->GetBuffered();
// Remove data from older decoders than the current one.
MSE_DEBUG("evicting all "
"bufferedStart=%f bufferedEnd=%f aPlaybackTime=%f size=%lld",
buffered->GetStartTime(), buffered->GetEndTime(),
buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds(),
aPlaybackTime, decoders[i]->GetResource()->GetSize());
toEvict -= decoders[i]->GetResource()->EvictAll();
}
@ -395,10 +389,9 @@ TrackBuffer::EvictData(double aPlaybackTime,
// Find the next decoder we're likely going to play with.
nsRefPtr<SourceBufferDecoder> nextPlayingDecoder = nullptr;
if (playingDecoder) {
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
playingDecoder->GetBuffered(buffered);
media::TimeIntervals buffered = playingDecoder->GetBuffered();
nextPlayingDecoder =
mParentDecoder->SelectDecoder(buffered->GetEndTime() * USECS_PER_S + 1,
mParentDecoder->SelectDecoder(buffered.GetEnd().ToMicroseconds() + 1,
EOS_FUZZ_US,
mInitializedDecoders);
}
@ -413,12 +406,11 @@ TrackBuffer::EvictData(double aPlaybackTime,
decoders[i] == mCurrentDecoder) {
continue;
}
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
decoders[i]->GetBuffered(buffered);
media::TimeIntervals buffered = decoders[i]->GetBuffered();
MSE_DEBUG("evicting all "
"bufferedStart=%f bufferedEnd=%f aPlaybackTime=%f size=%lld",
buffered->GetStartTime(), buffered->GetEndTime(),
buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds(),
aPlaybackTime, decoders[i]->GetResource()->GetSize());
toEvict -= decoders[i]->GetResource()->EvictAll();
}
@ -428,9 +420,8 @@ TrackBuffer::EvictData(double aPlaybackTime,
bool evicted = toEvict < (totalSize - aThreshold);
if (evicted) {
if (playingDecoder) {
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
playingDecoder->GetBuffered(ranges);
*aBufferStartTime = std::max(0.0, ranges->GetStartTime());
media::TimeIntervals ranges = playingDecoder->GetBuffered();
*aBufferStartTime = std::max(0.0, ranges.GetStart().ToSeconds());
} else {
// We do not currently have data to play yet.
// Avoid evicting anymore data to minimize rebuffering time.
@ -448,20 +439,18 @@ TrackBuffer::RemoveEmptyDecoders(nsTArray<mozilla::SourceBufferDecoder*>& aDecod
// Remove decoders that have no data in them
for (uint32_t i = 0; i < aDecoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
aDecoders[i]->GetBuffered(buffered);
media::TimeIntervals buffered = aDecoders[i]->GetBuffered();
MSE_DEBUG("maybe remove empty decoders=%d "
"size=%lld start=%f end=%f",
i, aDecoders[i]->GetResource()->GetSize(),
buffered->GetStartTime(), buffered->GetEndTime());
buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds());
if (aDecoders[i] == mCurrentDecoder ||
mParentDecoder->IsActiveReader(aDecoders[i]->GetReader())) {
continue;
}
if (aDecoders[i]->GetResource()->GetSize() == 0 ||
buffered->GetStartTime() < 0.0 ||
buffered->GetEndTime() < 0.0) {
if (aDecoders[i]->GetResource()->GetSize() == 0 || !buffered.Length() ||
buffered[0].IsEmpty()) {
MSE_DEBUG("remove empty decoders=%d", i);
RemoveDecoder(aDecoders[i]);
}
@ -484,12 +473,11 @@ TrackBuffer::HasOnlyIncompleteMedia()
if (!mCurrentDecoder) {
return false;
}
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
mCurrentDecoder->GetBuffered(buffered);
media::TimeIntervals buffered = mCurrentDecoder->GetBuffered();
MSE_DEBUG("mCurrentDecoder.size=%lld, start=%f end=%f",
mCurrentDecoder->GetResource()->GetSize(),
buffered->GetStartTime(), buffered->GetEndTime());
return mCurrentDecoder->GetResource()->GetSize() && !buffered->Length();
buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds());
return mCurrentDecoder->GetResource()->GetSize() && !buffered.Length();
}
void
@ -507,23 +495,23 @@ TrackBuffer::EvictBefore(double aTime)
}
}
double
TrackBuffer::Buffered(dom::TimeRanges* aRanges)
media::TimeIntervals
TrackBuffer::Buffered()
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
double highestEndTime = 0;
media::TimeIntervals buffered;
for (uint32_t i = 0; i < mInitializedDecoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
mInitializedDecoders[i]->GetBuffered(r);
if (r->Length() > 0) {
highestEndTime = std::max(highestEndTime, r->GetEndTime());
aRanges->Union(r, double(mParser->GetRoundingError()) / USECS_PER_S);
}
for (auto& decoder : mInitializedDecoders) {
buffered += decoder->GetBuffered();
}
// mParser may not be initialized yet, and will only be so if we have a
// buffered range.
if (buffered.Length()) {
buffered.SetFuzz(media::TimeUnit::FromMicroseconds(mParser->GetRoundingError()));
}
return highestEndTime;
return buffered;
}
already_AddRefed<SourceBufferDecoder>
@ -898,11 +886,11 @@ bool
TrackBuffer::ContainsTime(int64_t aTime, int64_t aTolerance)
{
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
for (uint32_t i = 0; i < mInitializedDecoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
mInitializedDecoders[i]->GetBuffered(r);
if (r->Find(double(aTime) / USECS_PER_S,
double(aTolerance) / USECS_PER_S) != dom::TimeRanges::NoIndex) {
media::TimeUnit time{media::TimeUnit::FromMicroseconds(aTime)};
for (auto& decoder : mInitializedDecoders) {
media::TimeIntervals r = decoder->GetBuffered();
r.SetFuzz(media::TimeUnit::FromMicroseconds(aTolerance));
if (r.Contains(time)) {
return true;
}
}
@ -1057,17 +1045,17 @@ TrackBuffer::RemoveDecoder(SourceBufferDecoder* aDecoder)
}
bool
TrackBuffer::RangeRemoval(media::Microseconds aStart,
media::Microseconds aEnd)
TrackBuffer::RangeRemoval(media::TimeUnit aStart,
media::TimeUnit aEnd)
{
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
media::Microseconds bufferedEnd = media::Microseconds::FromSeconds(Buffered(buffered));
media::Microseconds bufferedStart = media::Microseconds::FromSeconds(buffered->GetStartTime());
media::TimeIntervals buffered = Buffered();
media::TimeUnit bufferedStart = buffered.GetStart();
media::TimeUnit bufferedEnd = buffered.GetEnd();
if (bufferedStart < media::Microseconds(0) || aStart > bufferedEnd || aEnd < bufferedStart) {
if (!buffered.Length() || aStart > bufferedEnd || aEnd < bufferedStart) {
// Nothing to remove.
return false;
}
@ -1085,18 +1073,17 @@ TrackBuffer::RangeRemoval(media::Microseconds aStart,
if (aStart <= bufferedStart && aEnd < bufferedEnd) {
// Evict data from beginning.
for (size_t i = 0; i < decoders.Length(); ++i) {
nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
decoders[i]->GetBuffered(buffered);
if (media::Microseconds::FromSeconds(buffered->GetEndTime()) < aEnd) {
media::TimeIntervals buffered = decoders[i]->GetBuffered();
if (buffered.GetEnd() < aEnd) {
// Can be fully removed.
MSE_DEBUG("remove all bufferedEnd=%f size=%lld",
buffered->GetEndTime(),
buffered.GetEnd().ToSeconds(),
decoders[i]->GetResource()->GetSize());
decoders[i]->GetResource()->EvictAll();
} else {
int64_t offset = decoders[i]->ConvertToByteOffset(aEnd.ToSeconds());
MSE_DEBUG("removing some bufferedEnd=%f offset=%lld size=%lld",
buffered->GetEndTime(), offset,
buffered.GetEnd().ToSeconds(), offset,
decoders[i]->GetResource()->GetSize());
if (offset > 0) {
decoders[i]->GetResource()->EvictData(offset, offset);
@ -1106,11 +1093,11 @@ TrackBuffer::RangeRemoval(media::Microseconds aStart,
} else {
// Only trimming existing buffers.
for (size_t i = 0; i < decoders.Length(); ++i) {
if (aStart <= media::Microseconds::FromSeconds(buffered->GetStartTime())) {
if (aStart <= buffered.GetStart()) {
// It will be entirely emptied, can clear all data.
decoders[i]->GetResource()->EvictAll();
} else {
decoders[i]->Trim(aStart.mValue);
decoders[i]->Trim(aStart.ToMicroseconds());
}
}
}

View File

@ -24,12 +24,6 @@ class ContainerParser;
class MediaSourceDecoder;
class MediaLargeByteBuffer;
namespace dom {
class TimeRanges;
} // namespace dom
class TrackBuffer final {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffer);
@ -56,10 +50,9 @@ public:
// of the buffer through to aTime.
void EvictBefore(double aTime);
// Returns the highest end time of all of the buffered ranges in the
// decoders managed by this TrackBuffer, and returns the union of the
// decoders buffered ranges in aRanges. This may be called on any thread.
double Buffered(dom::TimeRanges* aRanges);
// Returns the union of the decoders' buffered ranges.
// This may be called on any thread.
media::TimeIntervals Buffered();
// Mark the current decoder's resource as ended, clear mCurrentDecoder and
// reset mLast{Start,End}Timestamp. Main thread only.
@ -99,8 +92,8 @@ public:
// Implementation is only partial, we can only trim a buffer.
// Returns true if data was evicted.
// Times are in microseconds.
bool RangeRemoval(mozilla::media::Microseconds aStart,
mozilla::media::Microseconds aEnd);
bool RangeRemoval(mozilla::media::TimeUnit aStart,
mozilla::media::TimeUnit aEnd);
// Abort any pending appendBuffer by rejecting any pending promises.
void AbortAppendData();

View File

@ -17,7 +17,6 @@
extern "C" {
#include "opus/opus_multistream.h"
}
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/TimeStamp.h"
#include "VorbisUtils.h"
#include "MediaMetadataManager.h"
@ -1853,29 +1852,31 @@ nsresult OggReader::SeekBisection(int64_t aTarget,
return NS_OK;
}
nsresult OggReader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals OggReader::GetBuffered()
{
MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
{
mozilla::ReentrantMonitorAutoEnter mon(mMonitor);
if (mIsChained)
return NS_ERROR_FAILURE;
if (mIsChained) {
return media::TimeIntervals::Invalid();
}
}
#ifdef OGG_ESTIMATE_BUFFERED
return MediaDecoderReader::GetBuffered(aBuffered);
return MediaDecoderReader::GetBuffered();
#else
media::TimeIntervals buffered;
// HasAudio and HasVideo are not used here as they take a lock and cause
// a deadlock. Accessing mInfo doesn't require a lock - it doesn't change
// after metadata is read.
if (!mInfo.HasValidMedia()) {
// No need to search through the file if there are no audio or video tracks
return NS_OK;
return buffered;
}
AutoPinned<MediaResource> resource(mDecoder->GetResource());
nsTArray<MediaByteRange> ranges;
nsresult res = resource->GetCachedRanges(ranges);
NS_ENSURE_SUCCESS(res, res);
NS_ENSURE_SUCCESS(res, media::TimeIntervals::Invalid());
// Traverse across the buffered byte ranges, determining the time ranges
// they contain. MediaResource::GetNextCachedData(offset) returns -1 when
@ -1909,7 +1910,7 @@ nsresult OggReader::GetBuffered(dom::TimeRanges* aBuffered)
&page,
discard);
if (res == PAGE_SYNC_ERROR) {
return NS_ERROR_FAILURE;
return media::TimeIntervals::Invalid();
} else if (res == PAGE_SYNC_END_OF_RANGE) {
// Hit the end of range without reading a page, give up trying to
// find a start time for this buffered range, skip onto the next one.
@ -1949,7 +1950,7 @@ nsresult OggReader::GetBuffered(dom::TimeRanges* aBuffered)
// prevents us searching through the rest of the media when we
// may not be able to extract timestamps from it.
SetChained(true);
return NS_OK;
return buffered;
}
}
@ -1957,14 +1958,15 @@ nsresult OggReader::GetBuffered(dom::TimeRanges* aBuffered)
// We were able to find a start time for that range, see if we can
// find an end time.
int64_t endTime = RangeEndTime(startOffset, endOffset, true);
if (endTime != -1) {
aBuffered->Add((startTime - mStartTime) / static_cast<double>(USECS_PER_S),
(endTime - mStartTime) / static_cast<double>(USECS_PER_S));
if (endTime > startTime) {
buffered += media::TimeInterval(
media::TimeUnit::FromMicroseconds(startTime - mStartTime),
media::TimeUnit::FromMicroseconds(endTime - mStartTime));
}
}
}
return NS_OK;
return buffered;
#endif
}

View File

@ -18,12 +18,6 @@
#include "VideoUtils.h"
#include "mozilla/Monitor.h"
namespace mozilla {
namespace dom {
class TimeRanges;
}
}
namespace mozilla {
// Thread safe container to store the codec information and the serial for each
@ -77,7 +71,7 @@ public:
MetadataTags** aTags) override;
virtual nsRefPtr<SeekPromise>
Seek(int64_t aTime, int64_t aEndTime) override;
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
virtual media::TimeIntervals GetBuffered() override;
virtual bool IsMediaSeekable() override;

View File

@ -8,7 +8,6 @@
#include "MediaDecoderStateMachine.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/dom/TimeRanges.h"
#include "MediaResource.h"
#include "VideoUtils.h"
#include "MediaOmxDecoder.h"

View File

@ -11,10 +11,6 @@
namespace mozilla {
namespace dom {
class TimeRanges;
}
class AbstractMediaDecoder;
class RtspMediaResource;
@ -48,8 +44,8 @@ public:
// we returned are not useful for the MediaDecodeStateMachine. Unlike the
// ChannelMediaResource, it has a "cache" that can store the whole streaming
// data so the |GetBuffered| function can retrieve useful time ranges.
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override {
return NS_ERROR_NOT_IMPLEMENTED;
virtual media::TimeIntervals GetBuffered() override {
return media::TimeIntervals::Invalid();
}
virtual void SetIdle() override;

View File

@ -60,8 +60,8 @@ public:
// we returned are not useful for the MediaDecodeStateMachine. Unlike the
// ChannelMediaResource, it has a "cache" that can store the whole streaming
// data so the |GetBuffered| function can retrieve useful time ranges.
virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered) final override {
return NS_ERROR_NOT_IMPLEMENTED;
virtual media::TimeIntervals GetBuffered() final override {
return media::TimeIntervals::Invalid();
}
virtual void SetIdle() override;

View File

@ -284,7 +284,7 @@ nsresult RawReader::SeekInternal(int64_t aTime)
return NS_OK;
}
nsresult RawReader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals RawReader::GetBuffered()
{
return NS_OK;
return media::TimeIntervals();
}

View File

@ -42,7 +42,7 @@ public:
virtual nsRefPtr<SeekPromise>
Seek(int64_t aTime, int64_t aEndTime) override;
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
virtual media::TimeIntervals GetBuffered() override;
virtual bool IsMediaSeekable() override;

View File

@ -7,7 +7,6 @@
#include "AbstractMediaDecoder.h"
#include "MediaResource.h"
#include "WaveReader.h"
#include "mozilla/dom/TimeRanges.h"
#include "MediaDecoderStateMachine.h"
#include "VideoUtils.h"
#include "nsISeekableStream.h"
@ -275,15 +274,12 @@ WaveReader::Seek(int64_t aTarget, int64_t aEndTime)
}
}
static double RoundToUsecs(double aSeconds) {
return floor(aSeconds * USECS_PER_S) / USECS_PER_S;
}
nsresult WaveReader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals WaveReader::GetBuffered()
{
if (!mInfo.HasAudio()) {
return NS_OK;
return media::TimeIntervals();
}
media::TimeIntervals buffered;
AutoPinned<MediaResource> resource(mDecoder->GetResource());
int64_t startOffset = resource->GetNextCachedData(mWavePCMOffset);
while (startOffset >= 0) {
@ -295,11 +291,12 @@ nsresult WaveReader::GetBuffered(dom::TimeRanges* aBuffered)
// We need to round the buffered ranges' times to microseconds so that they
// have the same precision as the currentTime and duration attribute on
// the media element.
aBuffered->Add(RoundToUsecs(BytesToTime(startOffset - mWavePCMOffset)),
RoundToUsecs(BytesToTime(endOffset - mWavePCMOffset)));
buffered += media::TimeInterval(
media::TimeUnit::FromSeconds(BytesToTime(startOffset - mWavePCMOffset)),
media::TimeUnit::FromSeconds(BytesToTime(endOffset - mWavePCMOffset)));
startOffset = resource->GetNextCachedData(endOffset);
}
return NS_OK;
return buffered;
}
bool

View File

@ -9,12 +9,6 @@
#include "MediaDecoderReader.h"
#include "mozilla/dom/HTMLMediaElement.h"
namespace mozilla {
namespace dom {
class TimeRanges;
}
}
namespace mozilla {
class WaveReader : public MediaDecoderReader
@ -46,7 +40,7 @@ public:
virtual nsRefPtr<SeekPromise>
Seek(int64_t aTime, int64_t aEndTime) override;
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
virtual media::TimeIntervals GetBuffered() override;
virtual bool IsMediaSeekable() override;

View File

@ -8,9 +8,9 @@
#include "gfx2DGlue.h"
#include "MediaDecoderStateMachine.h"
#include "MediaResource.h"
#include "mozilla/dom/TimeRanges.h"
#include "nsError.h"
#include "OggReader.h"
#include "TimeUnits.h"
#include "VorbisUtils.h"
#include "WebMBufferedParser.h"

View File

@ -6,7 +6,6 @@
#include "nsAlgorithm.h"
#include "WebMBufferedParser.h"
#include "mozilla/dom/TimeRanges.h"
#include "nsThreadUtils.h"
#include <algorithm>

View File

@ -10,7 +10,6 @@
#include "SoftwareWebMVideoDecoder.h"
#include "WebMReader.h"
#include "WebMBufferedParser.h"
#include "mozilla/dom/TimeRanges.h"
#include "VorbisUtils.h"
#include "gfx2DGlue.h"
#include "Layers.h"
@ -1107,21 +1106,20 @@ nsresult WebMReader::SeekInternal(int64_t aTarget)
return NS_OK;
}
nsresult WebMReader::GetBuffered(dom::TimeRanges* aBuffered)
media::TimeIntervals WebMReader::GetBuffered()
{
MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
if (aBuffered->Length() != 0) {
return NS_ERROR_FAILURE;
}
AutoPinned<MediaResource> resource(mDecoder->GetResource());
media::TimeIntervals buffered;
// Special case completely cached files. This also handles local files.
if (mContext && resource->IsDataCachedToEndOfResource(0)) {
uint64_t duration = 0;
if (nestegg_duration(mContext, &duration) == 0) {
aBuffered->Add(0, duration / NS_PER_S);
return NS_OK;
buffered +=
media::TimeInterval(media::TimeUnit::FromSeconds(0),
media::TimeUnit::FromSeconds(duration / NS_PER_S));
return buffered;
}
}
@ -1129,7 +1127,7 @@ nsresult WebMReader::GetBuffered(dom::TimeRanges* aBuffered)
// the WebM bitstream.
nsTArray<MediaByteRange> ranges;
nsresult res = resource->GetCachedRanges(ranges);
NS_ENSURE_SUCCESS(res, res);
NS_ENSURE_SUCCESS(res, media::TimeIntervals::Invalid());
for (uint32_t index = 0; index < ranges.Length(); index++) {
uint64_t start, end;
@ -1154,12 +1152,12 @@ nsresult WebMReader::GetBuffered(dom::TimeRanges* aBuffered)
endTime = duration / NS_PER_S;
}
}
aBuffered->Add(startTime, endTime);
buffered += media::TimeInterval(media::TimeUnit::FromSeconds(startTime),
media::TimeUnit::FromSeconds(endTime));
}
}
return NS_OK;
return buffered;
}
void WebMReader::NotifyDataArrived(const char* aBuffer, uint32_t aLength,

View File

@ -172,7 +172,7 @@ public:
virtual nsRefPtr<SeekPromise>
Seek(int64_t aTime, int64_t aEndTime) override;
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
virtual media::TimeIntervals GetBuffered() override;
virtual void NotifyDataArrived(const char* aBuffer, uint32_t aLength,
int64_t aOffset) override;
virtual int64_t GetEvictionOffset(double aTime) override;

View File

@ -10,7 +10,6 @@
#include "WMFByteStream.h"
#include "WMFSourceReaderCallback.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/Preferences.h"
#include "DXVA2Manager.h"