Backed out changeset 7691b13459f4 (bug 744896) for B2G ICS Emulator Opt test failures on a CLOSED TREE

Carsten "Tomcat" Book 2014-07-03 11:51:41 +02:00
parent 5166e4fdc1
commit 0e366174c0
14 changed files with 97 additions and 53 deletions
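Taken together, the hunks below revert MetadataLoaded and QueueMetadata across the media stack from the aggregate MediaInfo*/MetadataTags* form back to explicit aChannels/aRate/aHasAudio/aHasVideo parameters, and restore AudioMetadataEventRunner. For orientation, here is a minimal standalone C++ sketch contrasting the two shapes of the callback; the stand-in types (MediaInfoSketch, MetadataTags as a std::map, the owner structs) are illustrative assumptions, not the tree's real definitions.

// Standalone sketch, not Mozilla code: contrasts the signature being backed out
// with the signature this commit restores. Types here are illustrative stand-ins.
#include <iostream>
#include <map>
#include <string>

using MetadataTags = std::map<std::string, std::string>;  // stand-in for the tree's MetadataTags

// Aggregate form removed by this backout: one object carries the track info.
struct MediaInfoSketch {  // stand-in for mozilla::MediaInfo
  int mChannels = 0;
  int mRate = 0;
  bool mHasAudio = false;
  bool mHasVideo = false;
  bool HasAudio() const { return mHasAudio; }
  bool HasVideo() const { return mHasVideo; }
};

struct OwnerAggregateForm {
  virtual void MetadataLoaded(const MediaInfoSketch* aInfo,
                              const MetadataTags* aTags) = 0;
  virtual ~OwnerAggregateForm() = default;
};

// Explicit-parameter form restored by this commit.
struct OwnerExplicitForm {
  virtual void MetadataLoaded(int aChannels,
                              int aRate,
                              bool aHasAudio,
                              bool aHasVideo,
                              const MetadataTags* aTags) = 0;
  virtual ~OwnerExplicitForm() = default;
};

// Roughly what HTMLMediaElement::MetadataLoaded does in the hunk below:
// it only keeps the pieces it needs (here, whether an audio track exists).
struct ElementSketch final : OwnerExplicitForm {
  bool mHasAudio = false;
  void MetadataLoaded(int, int, bool aHasAudio, bool, const MetadataTags*) override {
    mHasAudio = aHasAudio;
  }
};

int main() {
  ElementSketch element;
  element.MetadataLoaded(/*aChannels=*/2, /*aRate=*/44100,
                         /*aHasAudio=*/true, /*aHasVideo=*/false, nullptr);
  std::cout << "mHasAudio = " << element.mHasAudio << "\n";
  return 0;
}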

View File

@@ -151,7 +151,10 @@ public:
   // Called by the video decoder object, on the main thread,
   // when it has read the metadata containing video dimensions,
   // etc.
-  virtual void MetadataLoaded(const MediaInfo* aInfo,
+  virtual void MetadataLoaded(int aChannels,
+                              int aRate,
+                              bool aHasAudio,
+                              bool aHasVideo,
                               const MetadataTags* aTags) MOZ_FINAL MOZ_OVERRIDE;

   // Called by the video decoder object, on the main thread,

View File

@@ -2876,10 +2876,13 @@ void HTMLMediaElement::ProcessMediaFragmentURI()
   }
 }

-void HTMLMediaElement::MetadataLoaded(const MediaInfo* aInfo,
+void HTMLMediaElement::MetadataLoaded(int aChannels,
+                                      int aRate,
+                                      bool aHasAudio,
+                                      bool aHasVideo,
                                       const MetadataTags* aTags)
 {
-  mHasAudio = aInfo->HasAudio();
+  mHasAudio = aHasAudio;
   mTags = aTags;
   ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
   DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
@@ -2892,7 +2895,7 @@ void HTMLMediaElement::MetadataLoaded(const MediaInfo* aInfo,
   // If this element had a video track, but consists only of an audio track now,
   // delete the VideoFrameContainer. This happens when the src is changed to an
   // audio only file.
-  if (!aInfo->HasVideo() && mVideoFrameContainer) {
+  if (!aHasVideo && mVideoFrameContainer) {
     // call ForgetElement() such that callbacks from |mVideoFrameContainer|
     // won't reach us anymore.
     mVideoFrameContainer->ForgetElement();

View File

@@ -8,7 +8,6 @@
 #define AbstractMediaDecoder_h_

 #include "mozilla/Attributes.h"
-#include "MediaInfo.h"
 #include "nsISupports.h"
 #include "nsDataHashtable.h"
 #include "nsThreadUtils.h"
@@ -90,8 +89,8 @@ public:
   // Return true if the transport layer supports seeking.
   virtual bool IsMediaSeekable() = 0;

-  virtual void MetadataLoaded(MediaInfo* aInfo, MetadataTags* aTags) = 0;
-  virtual void QueueMetadata(int64_t aTime, MediaInfo* aInfo, MetadataTags* aTags) = 0;
+  virtual void MetadataLoaded(int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags) = 0;
+  virtual void QueueMetadata(int64_t aTime, int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags) = 0;

   // Set the media end time in microseconds
   virtual void SetMediaEndTime(int64_t aTime) = 0;
@@ -137,27 +136,30 @@ public:
 };
 };

-class MetadataEventRunner : public nsRunnable
+class AudioMetadataEventRunner : public nsRunnable
 {
   private:
     nsRefPtr<AbstractMediaDecoder> mDecoder;
   public:
-    MetadataEventRunner(AbstractMediaDecoder* aDecoder, MediaInfo* aInfo, MetadataTags* aTags)
+    AudioMetadataEventRunner(AbstractMediaDecoder* aDecoder, int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags)
       : mDecoder(aDecoder),
-        mInfo(aInfo),
-        mTags(aTags)
+        mChannels(aChannels),
+        mRate(aRate),
+        mHasAudio(aHasAudio),
+        mHasVideo(aHasVideo),
+        mTags(aTags)
     {}

     NS_IMETHOD Run() MOZ_OVERRIDE
     {
-      mDecoder->MetadataLoaded(mInfo, mTags);
+      mDecoder->MetadataLoaded(mChannels, mRate, mHasAudio, mHasVideo, mTags);
       return NS_OK;
     }

-    // The ownership is transferred to MediaDecoder.
-    MediaInfo* mInfo;
-    // The ownership is transferred to its owning element.
+    int mChannels;
+    int mRate;
+    bool mHasAudio;
+    bool mHasVideo;
     MetadataTags* mTags;
 };

View File

@@ -147,13 +147,13 @@ BufferDecoder::IsMediaSeekable()
 }

 void
-BufferDecoder::MetadataLoaded(MediaInfo* aInfo, MetadataTags* aTags)
+BufferDecoder::MetadataLoaded(int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags)
 {
   // ignore
 }

 void
-BufferDecoder::QueueMetadata(int64_t aTime, MediaInfo* aInfo, MetadataTags* aTags)
+BufferDecoder::QueueMetadata(int64_t aTime, int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags)
 {
   // ignore
 }

View File

@@ -60,8 +60,8 @@ public:
   virtual bool IsMediaSeekable() MOZ_OVERRIDE;

-  virtual void MetadataLoaded(MediaInfo* aInfo, MetadataTags* aTags) MOZ_OVERRIDE;
-  virtual void QueueMetadata(int64_t aTime, MediaInfo* aInfo, MetadataTags* aTags) MOZ_OVERRIDE;
+  virtual void MetadataLoaded(int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags) MOZ_OVERRIDE;
+  virtual void QueueMetadata(int64_t aTime, int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags) MOZ_OVERRIDE;

   virtual void SetMediaEndTime(int64_t aTime) MOZ_OVERRIDE;

View File

@@ -653,12 +653,15 @@ already_AddRefed<nsIPrincipal> MediaDecoder::GetCurrentPrincipal()
 }

 void MediaDecoder::QueueMetadata(int64_t aPublishTime,
-                                 MediaInfo* aInfo,
+                                 int aChannels,
+                                 int aRate,
+                                 bool aHasAudio,
+                                 bool aHasVideo,
                                  MetadataTags* aTags)
 {
   NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
   GetReentrantMonitor().AssertCurrentThreadIn();
-  mDecoderStateMachine->QueueMetadata(aPublishTime, aInfo, aTags);
+  mDecoderStateMachine->QueueMetadata(aPublishTime, aChannels, aRate, aHasAudio, aHasVideo, aTags);
 }

 bool
@@ -669,7 +672,7 @@ MediaDecoder::IsDataCachedToEndOfResource()
                              mResource->IsDataCachedToEndOfResource(mDecoderPosition));
 }

-void MediaDecoder::MetadataLoaded(MediaInfo* aInfo, MetadataTags* aTags)
+void MediaDecoder::MetadataLoaded(int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags)
 {
   MOZ_ASSERT(NS_IsMainThread());
   if (mShuttingDown) {
@@ -697,7 +700,7 @@ void MediaDecoder::MetadataLoaded(MediaInfo* aInfo, MetadataTags* aTags)
     // Make sure the element and the frame (if any) are told about
     // our new size.
     Invalidate();
-    mOwner->MetadataLoaded(aInfo, aTags);
+    mOwner->MetadataLoaded(aChannels, aRate, aHasAudio, aHasVideo, aTags);
   }

   if (!mCalledResourceLoaded) {

View File

@@ -751,7 +751,10 @@ public:
   // main thread to be presented when the |currentTime| of the media is greater
   // or equal to aPublishTime.
   void QueueMetadata(int64_t aPublishTime,
-                     MediaInfo* aInfo,
+                     int aChannels,
+                     int aRate,
+                     bool aHasAudio,
+                     bool aHasVideo,
                      MetadataTags* aTags);

   /******
@@ -774,7 +777,10 @@ public:
   // Called when the metadata from the media file has been loaded by the
   // state machine. Call on the main thread only.
-  virtual void MetadataLoaded(MediaInfo* aInfo,
+  virtual void MetadataLoaded(int aChannels,
+                              int aRate,
+                              bool aHasAudio,
+                              bool aHasVideo,
                               MetadataTags* aTags);

   // Called when the first frame has been loaded.

View File

@@ -49,7 +49,10 @@ public:
   // Called by the video decoder object, on the main thread,
   // when it has read the metadata containing video dimensions,
   // etc.
-  virtual void MetadataLoaded(const MediaInfo* aInfo,
+  virtual void MetadataLoaded(int aChannels,
+                              int aRate,
+                              bool aHasAudio,
+                              bool aHasVideo,
                               const MetadataTags* aTags) = 0;

   // Called by the video decoder object, on the main thread,

View File

@@ -1983,10 +1983,13 @@ MediaDecoderStateMachine::FinishDecodeMetadata()
   }

   // Inform the element that we've loaded the metadata and the first frame.
-  nsAutoPtr<MediaInfo> info(new MediaInfo());
-  *info = mInfo;
   nsCOMPtr<nsIRunnable> metadataLoadedEvent =
-    new MetadataEventRunner(mDecoder, info.forget(), mMetadataTags.forget());
+    new AudioMetadataEventRunner(mDecoder,
+                                 mInfo.mAudio.mChannels,
+                                 mInfo.mAudio.mRate,
+                                 HasAudio(),
+                                 HasVideo(),
+                                 mMetadataTags.forget());
   NS_DispatchToMainThread(metadataLoadedEvent, NS_DISPATCH_NORMAL);

   if (mState == DECODER_STATE_DECODING_METADATA) {
@@ -3095,14 +3098,20 @@ bool MediaDecoderStateMachine::IsShutdown()
 }

 void MediaDecoderStateMachine::QueueMetadata(int64_t aPublishTime,
-                                             MediaInfo* aInfo,
+                                             int aChannels,
+                                             int aRate,
+                                             bool aHasAudio,
+                                             bool aHasVideo,
                                              MetadataTags* aTags)
 {
   NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
   AssertCurrentThreadInMonitor();
   TimedMetadata* metadata = new TimedMetadata;
   metadata->mPublishTime = aPublishTime;
-  metadata->mInfo = aInfo;
+  metadata->mChannels = aChannels;
+  metadata->mRate = aRate;
+  metadata->mHasAudio = aHasAudio;
+  metadata->mHasVideo = aHasVideo;
   metadata->mTags = aTags;
   mMetadataManager.QueueMetadata(metadata);
 }

View File

@@ -323,7 +323,7 @@ public:
   // shutting down. The decoder monitor must be held while calling this.
   bool IsShutdown();

-  void QueueMetadata(int64_t aPublishTime, MediaInfo* aInfo, MetadataTags* aTags);
+  void QueueMetadata(int64_t aPublishTime, int aChannels, int aRate, bool aHasAudio, bool aHasVideo, MetadataTags* aTags);

   // Returns true if we're currently playing. The decoder monitor must
   // be held.

View File

@@ -22,10 +22,14 @@ namespace mozilla {
     // The metadata. The ownership is transfered to the element when dispatching to
     // the main threads.
     nsAutoPtr<MetadataTags> mTags;
-    // The media info, including the info of audio tracks and video tracks.
-    // The ownership is transfered to MediaDecoder when dispatching to the
-    // main thread.
-    nsAutoPtr<MediaInfo> mInfo;
+    // The sample rate of this media.
+    int mRate;
+    // The number of channel of this media.
+    int mChannels;
+    // True if this media has an audio track.
+    bool mHasAudio;
+    // True if this media has a video track.
+    bool mHasVideo;
   };

   // This class encapsulate the logic to give the metadata from the reader to
@@ -47,9 +51,12 @@ namespace mozilla {
       TimedMetadata* metadata = mMetadataQueue.getFirst();
       while (metadata && aCurrentTime >= static_cast<double>(metadata->mPublishTime) / USECS_PER_S) {
         nsCOMPtr<nsIRunnable> metadataUpdatedEvent =
-          new MetadataEventRunner(aDecoder,
-                                  metadata->mInfo.forget(),
-                                  metadata->mTags.forget());
+          new AudioMetadataEventRunner(aDecoder,
+                                       metadata->mChannels,
+                                       metadata->mRate,
+                                       metadata->mHasAudio,
+                                       metadata->mHasVideo,
+                                       metadata->mTags.forget());
         NS_DispatchToMainThread(metadataUpdatedEvent);
         delete mMetadataQueue.popFirst();
         metadata = mMetadataQueue.getFirst();

View File

@@ -627,6 +627,8 @@ bool OggReader::ReadOggChain()
   OpusState* newOpusState = nullptr;
 #endif /* MOZ_OPUS */
   VorbisState* newVorbisState = nullptr;
+  int channels = 0;
+  long rate = 0;
   MetadataTags* tags = nullptr;

   if (HasVideo() || HasSkeleton() || !HasAudio()) {
@@ -671,7 +673,6 @@ bool OggReader::ReadOggChain()
     return false;
   }

-  nsAutoPtr<MediaInfo> info(new MediaInfo());
   if ((newVorbisState && ReadHeaders(newVorbisState)) &&
       (mVorbisState->mInfo.rate == newVorbisState->mInfo.rate) &&
       (mVorbisState->mInfo.channels == newVorbisState->mInfo.channels)) {
@@ -680,8 +681,8 @@ bool OggReader::ReadOggChain()
     mVorbisSerial = mVorbisState->mSerial;
     LOG(PR_LOG_DEBUG, ("New vorbis ogg link, serial=%d\n", mVorbisSerial));
     chained = true;
-    info->mAudio.mRate = mVorbisState->mInfo.rate;
-    info->mAudio.mChannels = mVorbisState->mInfo.channels;
+    rate = mVorbisState->mInfo.rate;
+    channels = mVorbisState->mInfo.channels;
     tags = mVorbisState->GetTags();
   }
@@ -693,8 +694,8 @@ bool OggReader::ReadOggChain()
     mOpusState = newOpusState;
     mOpusSerial = mOpusState->mSerial;
     chained = true;
-    info->mAudio.mRate = mOpusState->mRate;
-    info->mAudio.mChannels = mOpusState->mChannels;
+    rate = mOpusState->mRate;
+    channels = mOpusState->mChannels;
     tags = mOpusState->GetTags();
   }
 #endif
@@ -702,12 +703,13 @@ bool OggReader::ReadOggChain()
   if (chained) {
     SetChained(true);
     {
-      info->mAudio.mHasAudio = HasAudio();
-      info->mVideo.mHasVideo = HasVideo();
-      int rate = info->mAudio.mRate;
       ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
       mDecoder->QueueMetadata((mDecodedAudioFrames * USECS_PER_S) / rate,
-                              info.forget(), tags);
+                              channels,
+                              rate,
+                              HasAudio(),
+                              HasVideo(),
+                              tags);
     }
     return true;
   }

View File

@@ -55,11 +55,14 @@ void MediaOmxDecoder::SetCanOffloadAudio(bool aCanOffloadAudio)
   mCanOffloadAudio = aCanOffloadAudio;
 }

-void MediaOmxDecoder::MetadataLoaded(MediaInfo* aInfo,
+void MediaOmxDecoder::MetadataLoaded(int aChannels,
+                                     int aRate,
+                                     bool aHasAudio,
+                                     bool aHasVideo,
                                      MetadataTags* aTags)
 {
   MOZ_ASSERT(NS_IsMainThread());
-  MediaDecoder::MetadataLoaded(aInfo, aTags);
+  MediaDecoder::MetadataLoaded(aChannels, aRate, aHasAudio, aHasVideo, aTags);

   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
   if (!mCanOffloadAudio || mFallbackToStateMachine || mOutputStreams.Length() ||

View File

@@ -21,7 +21,10 @@ public:
   virtual MediaDecoder* Clone();
   virtual MediaDecoderStateMachine* CreateStateMachine();
-  virtual void MetadataLoaded(MediaInfo* aInfo,
+  virtual void MetadataLoaded(int aChannels,
+                              int aRate,
+                              bool aHasAudio,
+                              bool aHasVideo,
                               MetadataTags* aTags);
   virtual void ChangeState(PlayState aState);
   virtual void ApplyStateToStateMachine(PlayState aState);