Bug 584615 - Make media progress events be 'simple' Events, not 'progress' Events - r=roc,cpearce,dolske,kinetik a=blocking2.0

Chris Double 2010-09-10 15:29:06 +12:00
parent c944fb3919
commit 9c0d8b2cee
8 changed files with 132 additions and 122 deletions
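Note on the web-observable effect: media events that were previously dispatched as ProgressEvents ('loadstart', 'progress', 'suspend', 'stalled', 'abort' and 'error') are now plain DOM Events, so they no longer carry lengthComputable, loaded or total. A minimal page-script sketch (not part of this patch; the element lookup and the download indicator are illustrative) showing how content can read download state from the element's buffered ranges instead, much as the updated videocontrols binding below does:

// Hypothetical page script: with 'progress' now a simple Event, read
// download state from the media element's buffered TimeRanges instead
// of e.loaded / e.total.
var bar = document.querySelector("progress");   // hypothetical download indicator
var video = document.querySelector("video");
video.addEventListener("progress", function (e) {
  // e.lengthComputable / e.loaded / e.total are no longer meaningful here.
  var buffered = video.buffered;
  if (buffered.length > 0 && video.duration > 0) {
    bar.max = video.duration;
    bar.value = buffered.end(buffered.length - 1); // end of the last buffered range
  }
}, false);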

View File

@@ -185,10 +185,8 @@ public:
gfxASurface* GetPrintSurface() { return mPrintSurface; }
// Dispatch events
nsresult DispatchSimpleEvent(const nsAString& aName);
nsresult DispatchProgressEvent(const nsAString& aName);
nsresult DispatchAsyncSimpleEvent(const nsAString& aName);
nsresult DispatchAsyncProgressEvent(const nsAString& aName);
nsresult DispatchEvent(const nsAString& aName);
nsresult DispatchAsyncEvent(const nsAString& aName);
nsresult DispatchAudioAvailableEvent(float* aFrameBuffer,
PRUint32 aFrameBufferLength,
PRUint64 aTime);

View File

@@ -70,7 +70,6 @@
#include "nsEventDispatcher.h"
#include "nsIDOMDocumentEvent.h"
#include "nsIDOMProgressEvent.h"
#include "nsMediaError.h"
#include "nsICategoryManager.h"
#include "nsCharSeparatedTokenizer.h"
@@ -192,11 +191,10 @@ class nsAsyncEventRunner : public nsMediaEvent
{
private:
nsString mName;
PRPackedBool mProgress;
public:
nsAsyncEventRunner(const nsAString& aName, nsHTMLMediaElement* aElement, PRBool aProgress) :
nsMediaEvent(aElement), mName(aName), mProgress(aProgress)
nsAsyncEventRunner(const nsAString& aName, nsHTMLMediaElement* aElement) :
nsMediaEvent(aElement), mName(aName)
{
}
@@ -206,9 +204,7 @@ public:
if (IsCancelled())
return NS_OK;
return mProgress ?
mElement->DispatchProgressEvent(mName) :
mElement->DispatchSimpleEvent(mName);
return mElement->DispatchEvent(mName);
}
};
@@ -490,7 +486,7 @@ void nsHTMLMediaElement::AbortExistingLoads()
if (mNetworkState == nsIDOMHTMLMediaElement::NETWORK_LOADING ||
mNetworkState == nsIDOMHTMLMediaElement::NETWORK_IDLE)
{
DispatchProgressEvent(NS_LITERAL_STRING("abort"));
DispatchEvent(NS_LITERAL_STRING("abort"));
}
mError = nsnull;
@@ -513,9 +509,9 @@ void nsHTMLMediaElement::AbortExistingLoads()
// will now be reported as 0. The playback position was non-zero when
// we destroyed the decoder, so fire a timeupdate event so that the
// change will be reflected in the controls.
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("timeupdate"));
DispatchAsyncEvent(NS_LITERAL_STRING("timeupdate"));
}
DispatchSimpleEvent(NS_LITERAL_STRING("emptied"));
DispatchEvent(NS_LITERAL_STRING("emptied"));
}
// We may have changed mPaused, mAutoplaying, mNetworkState and other
@@ -531,7 +527,7 @@ void nsHTMLMediaElement::NoSupportedMediaSourceError()
mError = new nsMediaError(nsIDOMMediaError::MEDIA_ERR_SRC_NOT_SUPPORTED);
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_NO_SOURCE;
DispatchAsyncProgressEvent(NS_LITERAL_STRING("error"));
DispatchAsyncEvent(NS_LITERAL_STRING("error"));
// This clears mDelayingLoadEvent, so AddRemoveSelfReference will be called
ChangeDelayLoadStatus(PR_FALSE);
}
@@ -649,7 +645,7 @@ void nsHTMLMediaElement::SelectResource()
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_LOADING;
// Load event was delayed, and still is, so no need to call
// AddRemoveSelfReference, since it must still be held
DispatchAsyncProgressEvent(NS_LITERAL_STRING("loadstart"));
DispatchAsyncEvent(NS_LITERAL_STRING("loadstart"));
nsAutoString src;
nsCOMPtr<nsIURI> uri;
@@ -801,7 +797,7 @@ void nsHTMLMediaElement::SuspendLoad(nsIURI* aURI)
{
mLoadIsSuspended = PR_TRUE;
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
DispatchAsyncProgressEvent(NS_LITERAL_STRING("suspend"));
DispatchAsyncEvent(NS_LITERAL_STRING("suspend"));
ChangeDelayLoadStatus(PR_FALSE);
}
@@ -1037,7 +1033,7 @@ nsresult nsHTMLMediaElement::LoadWithChannel(nsIChannel *aChannel,
return rv;
}
DispatchAsyncProgressEvent(NS_LITERAL_STRING("loadstart"));
DispatchAsyncEvent(NS_LITERAL_STRING("loadstart"));
return NS_OK;
}
@@ -1061,7 +1057,7 @@ NS_IMETHODIMP nsHTMLMediaElement::MozLoadFrom(nsIDOMHTMLMediaElement* aOther)
return rv;
}
DispatchAsyncProgressEvent(NS_LITERAL_STRING("loadstart"));
DispatchAsyncEvent(NS_LITERAL_STRING("loadstart"));
return NS_OK;
}
@@ -1161,8 +1157,8 @@ NS_IMETHODIMP nsHTMLMediaElement::Pause()
AddRemoveSelfReference();
if (!oldPaused) {
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("timeupdate"));
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("pause"));
DispatchAsyncEvent(NS_LITERAL_STRING("timeupdate"));
DispatchAsyncEvent(NS_LITERAL_STRING("pause"));
}
return NS_OK;
@@ -1192,7 +1188,7 @@ NS_IMETHODIMP nsHTMLMediaElement::SetVolume(float aVolume)
mAudioStream->SetVolume(mVolume);
}
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("volumechange"));
DispatchAsyncEvent(NS_LITERAL_STRING("volumechange"));
return NS_OK;
}
@@ -1262,7 +1258,7 @@ NS_IMETHODIMP nsHTMLMediaElement::SetMuted(PRBool aMuted)
mAudioStream->SetVolume(mMuted ? 0.0 : mVolume);
}
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("volumechange"));
DispatchAsyncEvent(NS_LITERAL_STRING("volumechange"));
return NS_OK;
}
@@ -1385,15 +1381,15 @@ NS_IMETHODIMP nsHTMLMediaElement::Play()
// seek to the effective start.
// TODO: The playback rate must be set to the default playback rate.
if (mPaused) {
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("play"));
DispatchAsyncEvent(NS_LITERAL_STRING("play"));
switch (mReadyState) {
case nsIDOMHTMLMediaElement::HAVE_METADATA:
case nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA:
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("waiting"));
DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
break;
case nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA:
case nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA:
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("playing"));
DispatchAsyncEvent(NS_LITERAL_STRING("playing"));
break;
}
}
@@ -1949,8 +1945,8 @@ void nsHTMLMediaElement::MetadataLoaded(PRUint32 aChannels, PRUint32 aRate)
mChannels = aChannels;
mRate = aRate;
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("durationchange"));
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("loadedmetadata"));
DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
DispatchAsyncEvent(NS_LITERAL_STRING("loadedmetadata"));
}
void nsHTMLMediaElement::FirstFrameLoaded(PRBool aResourceFullyLoaded)
@@ -1976,9 +1972,9 @@ void nsHTMLMediaElement::ResourceLoaded()
AddRemoveSelfReference();
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
// Ensure a progress event is dispatched at the end of download.
DispatchAsyncProgressEvent(NS_LITERAL_STRING("progress"));
DispatchAsyncEvent(NS_LITERAL_STRING("progress"));
// The download has stopped.
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("suspend"));
DispatchAsyncEvent(NS_LITERAL_STRING("suspend"));
}
void nsHTMLMediaElement::NetworkError()
@@ -2004,10 +2000,10 @@ void nsHTMLMediaElement::Error(PRUint16 aErrorCode)
"Only use nsIDOMMediaError codes!");
mError = new nsMediaError(aErrorCode);
mBegun = PR_FALSE;
DispatchAsyncProgressEvent(NS_LITERAL_STRING("error"));
DispatchAsyncEvent(NS_LITERAL_STRING("error"));
if (mReadyState == nsIDOMHTMLMediaElement::HAVE_NOTHING) {
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_EMPTY;
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("emptied"));
DispatchAsyncEvent(NS_LITERAL_STRING("emptied"));
} else {
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
}
@@ -2021,20 +2017,20 @@ void nsHTMLMediaElement::PlaybackEnded()
// We changed the state of IsPlaybackEnded which can affect AddRemoveSelfReference
AddRemoveSelfReference();
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("ended"));
DispatchAsyncEvent(NS_LITERAL_STRING("ended"));
}
void nsHTMLMediaElement::SeekStarted()
{
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("seeking"));
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("timeupdate"));
DispatchAsyncEvent(NS_LITERAL_STRING("seeking"));
DispatchAsyncEvent(NS_LITERAL_STRING("timeupdate"));
}
void nsHTMLMediaElement::SeekCompleted()
{
mPlayingBeforeSeek = PR_FALSE;
SetPlayedOrSeeked(PR_TRUE);
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("seeked"));
DispatchAsyncEvent(NS_LITERAL_STRING("seeked"));
// We changed whether we're seeking so we need to AddRemoveSelfReference
AddRemoveSelfReference();
}
@@ -2044,7 +2040,7 @@ void nsHTMLMediaElement::DownloadSuspended()
if (mBegun) {
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
AddRemoveSelfReference();
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("suspend"));
DispatchAsyncEvent(NS_LITERAL_STRING("suspend"));
}
}
@@ -2059,7 +2055,7 @@ void nsHTMLMediaElement::DownloadResumed()
void nsHTMLMediaElement::DownloadStalled()
{
if (mNetworkState == nsIDOMHTMLMediaElement::NETWORK_LOADING) {
DispatchAsyncProgressEvent(NS_LITERAL_STRING("stalled"));
DispatchAsyncEvent(NS_LITERAL_STRING("stalled"));
}
}
@@ -2082,7 +2078,7 @@ void nsHTMLMediaElement::UpdateReadyStateForData(NextFrameStatus aNextFrame)
if (aNextFrame != NEXT_FRAME_AVAILABLE) {
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA);
if (!mWaitingFired && aNextFrame == NEXT_FRAME_UNAVAILABLE_BUFFERING) {
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("waiting"));
DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
mWaitingFired = PR_TRUE;
}
return;
@@ -2132,14 +2128,14 @@ void nsHTMLMediaElement::ChangeReadyState(nsMediaReadyState aState)
// Handle raising of "waiting" event during seek (see 4.8.10.9)
if (mPlayingBeforeSeek &&
oldState < nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA) {
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("waiting"));
DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
}
if (oldState < nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA &&
mReadyState >= nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA &&
!mLoadedFirstFrame)
{
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("loadeddata"));
DispatchAsyncEvent(NS_LITERAL_STRING("loadeddata"));
mLoadedFirstFrame = PR_TRUE;
}
@@ -2149,7 +2145,7 @@ void nsHTMLMediaElement::ChangeReadyState(nsMediaReadyState aState)
if (oldState < nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA &&
mReadyState >= nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA) {
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("canplay"));
DispatchAsyncEvent(NS_LITERAL_STRING("canplay"));
}
if (mReadyState == nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA) {
@@ -2159,12 +2155,12 @@ void nsHTMLMediaElement::ChangeReadyState(nsMediaReadyState aState)
if (oldState < nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA &&
mReadyState >= nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA &&
IsPotentiallyPlaying()) {
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("playing"));
DispatchAsyncEvent(NS_LITERAL_STRING("playing"));
}
if (oldState < nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA &&
mReadyState >= nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA) {
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("canplaythrough"));
DispatchAsyncEvent(NS_LITERAL_STRING("canplaythrough"));
}
}
@@ -2187,7 +2183,7 @@ void nsHTMLMediaElement::NotifyAutoplayDataReady()
SetPlayedOrSeeked(PR_TRUE);
mDecoder->Play();
}
DispatchAsyncSimpleEvent(NS_LITERAL_STRING("play"));
DispatchAsyncEvent(NS_LITERAL_STRING("play"));
}
}
@@ -2244,9 +2240,9 @@ nsresult nsHTMLMediaElement::DispatchAudioAvailableEvent(float* aFrameBuffer,
return target->DispatchEvent(event, &dummy);
}
nsresult nsHTMLMediaElement::DispatchSimpleEvent(const nsAString& aName)
nsresult nsHTMLMediaElement::DispatchEvent(const nsAString& aName)
{
LOG_EVENT(PR_LOG_DEBUG, ("%p Dispatching simple event %s", this,
LOG_EVENT(PR_LOG_DEBUG, ("%p Dispatching event %s", this,
NS_ConvertUTF16toUTF8(aName).get()));
return nsContentUtils::DispatchTrustedEvent(GetOwnerDoc(),
@@ -2256,55 +2252,16 @@ nsresult nsHTMLMediaElement::DispatchSimpleEvent(const nsAString& aName)
PR_TRUE);
}
nsresult nsHTMLMediaElement::DispatchAsyncSimpleEvent(const nsAString& aName)
nsresult nsHTMLMediaElement::DispatchAsyncEvent(const nsAString& aName)
{
LOG_EVENT(PR_LOG_DEBUG, ("%p Queuing simple event %s", this, NS_ConvertUTF16toUTF8(aName).get()));
LOG_EVENT(PR_LOG_DEBUG, ("%p Queuing event %s", this,
NS_ConvertUTF16toUTF8(aName).get()));
nsCOMPtr<nsIRunnable> event = new nsAsyncEventRunner(aName, this, PR_FALSE);
nsCOMPtr<nsIRunnable> event = new nsAsyncEventRunner(aName, this);
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
return NS_OK;
}
nsresult nsHTMLMediaElement::DispatchAsyncProgressEvent(const nsAString& aName)
{
LOG_EVENT(PR_LOG_DEBUG, ("%p Queuing progress event %s", this, NS_ConvertUTF16toUTF8(aName).get()));
nsCOMPtr<nsIRunnable> event = new nsAsyncEventRunner(aName, this, PR_TRUE);
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
return NS_OK;
}
nsresult nsHTMLMediaElement::DispatchProgressEvent(const nsAString& aName)
{
nsCOMPtr<nsIDOMDocumentEvent> docEvent(do_QueryInterface(GetOwnerDoc()));
nsCOMPtr<nsIDOMEventTarget> target(do_QueryInterface(static_cast<nsIContent*>(this)));
NS_ENSURE_TRUE(docEvent && target, NS_ERROR_INVALID_ARG);
nsCOMPtr<nsIDOMEvent> event;
nsresult rv = docEvent->CreateEvent(NS_LITERAL_STRING("ProgressEvent"), getter_AddRefs(event));
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIDOMProgressEvent> progressEvent(do_QueryInterface(event));
NS_ENSURE_TRUE(progressEvent, NS_ERROR_FAILURE);
PRInt64 totalBytes = 0;
PRUint64 downloadPosition = 0;
if (mDecoder) {
nsMediaDecoder::Statistics stats = mDecoder->GetStatistics();
totalBytes = stats.mTotalBytes;
downloadPosition = stats.mDownloadPosition;
}
rv = progressEvent->InitProgressEvent(aName, PR_TRUE, PR_TRUE,
totalBytes >= 0, downloadPosition, totalBytes);
NS_ENSURE_SUCCESS(rv, rv);
LOG_EVENT(PR_LOG_DEBUG, ("%p Dispatching progress event %s", this,
NS_ConvertUTF16toUTF8(aName).get()));
PRBool dummy;
return target->DispatchEvent(event, &dummy);
}
PRBool nsHTMLMediaElement::IsPotentiallyPlaying() const
{
// TODO:
@@ -2574,7 +2531,9 @@ nsresult nsHTMLMediaElement::GetBuffered(nsIDOMTimeRanges** aBuffered)
nsTimeRanges* ranges = new nsTimeRanges();
NS_ADDREF(*aBuffered = ranges);
if (mReadyState >= nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA && mDecoder) {
return mDecoder->GetBuffered(ranges);
// If GetBuffered fails we ignore the error result and just return the
// time ranges we found up till the error.
mDecoder->GetBuffered(ranges);
}
return NS_OK;
}

View File

@@ -359,7 +359,7 @@ void nsBuiltinDecoder::MetadataLoaded(PRUint32 aChannels,
else if (mElement) {
// Resource was loaded during metadata loading, when progress
// events are being ignored. Fire the final progress event.
mElement->DispatchAsyncProgressEvent(NS_LITERAL_STRING("progress"));
mElement->DispatchAsyncEvent(NS_LITERAL_STRING("progress"));
}
// Only inform the element of FirstFrameLoaded if not doing a load() in order
@@ -773,7 +773,7 @@ void nsBuiltinDecoder::PlaybackPositionChanged()
Invalidate();
if (mElement && lastTime != mCurrentTime) {
mElement->DispatchSimpleEvent(NS_LITERAL_STRING("timeupdate"));
mElement->DispatchEvent(NS_LITERAL_STRING("timeupdate"));
}
}
@@ -788,7 +788,7 @@ void nsBuiltinDecoder::DurationChanged()
if (mElement && oldDuration != mDuration) {
LOG(PR_LOG_DEBUG, ("%p duration changed to %lldms", this, mDuration));
mElement->DispatchSimpleEvent(NS_LITERAL_STRING("durationchange"));
mElement->DispatchEvent(NS_LITERAL_STRING("durationchange"));
}
}

View File

@@ -200,7 +200,7 @@ void nsMediaDecoder::Progress(PRBool aTimer)
now - mProgressTime >= TimeDuration::FromMilliseconds(PROGRESS_MS)) &&
!mDataTime.IsNull() &&
now - mDataTime <= TimeDuration::FromMilliseconds(PROGRESS_MS)) {
mElement->DispatchAsyncProgressEvent(NS_LITERAL_STRING("progress"));
mElement->DispatchAsyncEvent(NS_LITERAL_STRING("progress"));
mProgressTime = now;
}

View File

@@ -284,14 +284,10 @@ nsresult nsOggReader::ReadMetadata()
// Theora spec these can be considered the 'primary' bitstreams for playback.
// Extract the metadata needed from these streams.
// Set a default callback period for if we have no video data
if (mTheoraState) {
if (mTheoraState->Init()) {
gfxIntSize sz(mTheoraState->mInfo.pic_width,
mTheoraState->mInfo.pic_height);
mDecoder->SetVideoData(sz, mTheoraState->mPixelAspectRatio, nsnull);
} else {
mTheoraState = nsnull;
}
if (mTheoraState && mTheoraState->Init()) {
gfxIntSize sz(mTheoraState->mInfo.pic_width,
mTheoraState->mInfo.pic_height);
mDecoder->SetVideoData(sz, mTheoraState->mPixelAspectRatio, nsnull);
}
if (mVorbisState) {
mVorbisState->Init();
@@ -910,7 +906,7 @@ PRInt64 nsOggReader::FindEndTime(PRInt64 aEndOffset,
// We need more data if we've not encountered a page we've seen before,
// or we've read to the end of file.
if (mustBackOff || readHead == aEndOffset) {
if (endTime != -1) {
if (endTime != -1 || readStartOffset == 0) {
// We have encountered a page before, or we're at the end of file.
break;
}
@@ -1542,6 +1538,16 @@ nsresult nsOggReader::SeekBisection(PRInt64 aTarget,
nsresult nsOggReader::GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime)
{
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
// HasAudio and HasVideo are not used here as they take a lock and cause
// a deadlock. Accessing mInfo doesn't require a lock - it doesn't change
// after metadata is read and GetBuffered isn't called before metadata is
// read.
if (!mInfo.mHasVideo && !mInfo.mHasAudio) {
// No need to search through the file if there are no audio or video tracks
return NS_OK;
}
nsMediaStream* stream = mDecoder->GetCurrentStream();
// Traverse across the buffered byte ranges, determining the time ranges
@@ -1604,6 +1610,16 @@ nsresult nsOggReader::GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime)
startTime = codecState->Time(granulepos) - aStartTime;
NS_ASSERTION(startTime > 0, "Must have positive start time");
}
else if(codecState) {
// Page is for an inactive stream, skip it.
startOffset += page.header_len + page.body_len;
continue;
}
else {
// Page is for a stream we don't know about (possibly a chained
// ogg), return an error.
return PAGE_SYNC_ERROR;
}
}
if (startTime != -1) {

View File

@@ -17,10 +17,6 @@ var manager = new MediaTestManager;
function do_progress(e) {
var v = e.target;
ok(!v._finished, "Check no progress events after completed for " + v._name);
ok(e.lengthComputable, "Check progress lengthComputable for " + v._name);
v._last_progress_total = e.loaded;
ok(e.loaded <= e.total, "Check progress in bounds: " + e.loaded + " for " + v._name);
is(e.total, v._size, "Check progress total for " + v._name);
}
function do_ended(e) {
@@ -39,9 +35,7 @@ function startTest(test, token) {
v.src = test.name;
v.autoplay = true;
v._name = test.name;
v._size = test.size;
v._finished = false;
v._last_progress_total = 0;
v.addEventListener("ended", do_ended, false);
v.addEventListener("progress", do_progress, false);
document.body.appendChild(v);
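If a test in this style still wants to sanity-check download progress now that the ProgressEvent fields are gone, one option is to assert that the end of the buffered range never moves backwards between 'progress' events. A sketch only, not part of this patch (do_progress_buffered and the _last_buffered_end expando are invented names; ok() and v._name come from the harness code above):

function do_progress_buffered(e) {
  var v = e.target;
  var buffered = v.buffered;
  if (buffered.length > 0) {
    // Buffered data should only ever grow while downloading.
    var end = buffered.end(buffered.length - 1);
    ok(end >= (v._last_buffered_end || 0),
       "Check buffered end does not decrease for " + v._name);
    v._last_buffered_end = end;
  }
}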

View File

@@ -1678,7 +1678,7 @@ nsWaveDecoder::PlaybackPositionChanged()
if (mElement && lastTime != mCurrentTime) {
UpdateReadyStateForData();
mElement->DispatchSimpleEvent(NS_LITERAL_STRING("timeupdate"));
mElement->DispatchEvent(NS_LITERAL_STRING("timeupdate"));
}
}

View File

@@ -368,8 +368,8 @@
// fully loaded. (If it's still loading, it will fire a progress event
// and we'll figure out the exact state then.)
this.bufferBar.setAttribute("max", 100);
if (this.video.networkState == this.video.NETWORK_LOADED)
this.bufferBar.setAttribute("value", 100);
if (this.video.readyState >= this.video.HAVE_METADATA)
this.showBuffered();
else
this.bufferBar.setAttribute("value", 0);
@@ -458,15 +458,7 @@
this.durationChange(duration);
break;
case "progress":
var loaded = aEvent.loaded;
var total = aEvent.total;
this.log("+++ load, " + loaded + " of " + total);
// When the source is streaming, the value of .total is -1. Set the
// progress bar to the maximum, since it's not useful.
if (total == -1)
total = loaded;
this.bufferBar.max = total;
this.bufferBar.value = loaded;
this.showBuffered();
this.setupStatusFader();
break;
case "suspend":
@@ -508,6 +500,10 @@
this.bufferBar.value = 0;
break;
case "seeking":
this.showBuffered();
this.statusIcon.setAttribute("type", "throbber");
this.setupStatusFader();
break;
case "waiting":
this.statusIcon.setAttribute("type", "throbber");
this.setupStatusFader();
@@ -599,6 +595,53 @@
this.scrubber.value = currentTime;
},
showBuffered : function() {
function bsearch(haystack, needle, cmp) {
var length = haystack.length;
var low = 0;
var high = length;
while (high - low > 1) {
var probe = low + ((high - low) >> 1);
var r = cmp(haystack, probe, needle);
if (r == 0) {
low = probe;
break;
} else if (r > 0) {
low = probe + 1;
} else {
high = probe;
}
}
return low < length ? low : -1;
}
function bufferedCompare(buffered, i, time) {
if (time > buffered.end(i)) {
return 1;
} else if (time >= buffered.start(i)) {
return 0;
}
return -1;
}
var duration = Math.round(this.video.duration * 1000);
if (isNaN(duration))
duration = this.maxCurrentTimeSeen;
// Find the range that the current play position is in and use that
// range for bufferBar. At some point we may support multiple ranges
// displayed in the bar.
var currentTime = this.video.currentTime;
var buffered = this.video.buffered;
var index = bsearch(buffered, currentTime, bufferedCompare);
var endTime = 0;
if (index >= 0) {
endTime = Math.round(buffered.end(index) * 1000);
}
this.bufferBar.max = duration;
this.bufferBar.value = endTime;
},
onVolumeMouseInOut : function (event) {
// Ignore events caused by transitions between mute button and volumeStack,
// or between nodes inside these two elements.