Bug 920867 - Split VideoInfo into Video and Audio objects, then encapsulate in new MediaInfo object. r=cpearce

Matthew Gregan 2013-09-27 17:22:38 +12:00
parent 9ee50256f5
commit e1f44d3688
28 changed files with 188 additions and 174 deletions
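
In outline the refactoring is mechanical: the audio fields of the old VideoInfo move into a new AudioInfo, the video fields remain in a slimmed-down VideoInfo, and a new MediaInfo aggregates both behind HasAudio()/HasVideo()/HasValidMedia() accessors. A minimal sketch of how call sites migrate (the field values here are illustrative, not from the patch):

// Illustrative only; values are hypothetical.
MediaInfo info;
info.mAudio.mHasAudio = true;                // was: info.mHasAudio
info.mAudio.mRate = 44100;                   // was: info.mAudioRate
info.mAudio.mChannels = 2;                   // was: info.mAudioChannels
info.mVideo.mHasVideo = true;                // was: info.mHasVideo
info.mVideo.mDisplay = nsIntSize(640, 480);  // was: info.mDisplay
if (info.HasValidMedia()) {                  // was: info.mHasAudio || info.mHasVideo
  // ...
}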

View File

@ -535,8 +535,8 @@ nsresult MediaDecoderReader::DecodeToTarget(int64_t aTarget)
const AudioData* audio = AudioQueue().PeekFront();
if (!audio)
break;
CheckedInt64 startFrame = UsecsToFrames(audio->mTime, mInfo.mAudioRate);
CheckedInt64 targetFrame = UsecsToFrames(aTarget, mInfo.mAudioRate);
CheckedInt64 startFrame = UsecsToFrames(audio->mTime, mInfo.mAudio.mRate);
CheckedInt64 targetFrame = UsecsToFrames(aTarget, mInfo.mAudio.mRate);
if (!startFrame.isValid() || !targetFrame.isValid()) {
return NS_ERROR_FAILURE;
}
@ -580,7 +580,7 @@ nsresult MediaDecoderReader::DecodeToTarget(int64_t aTarget)
memcpy(audioData.get(),
audio->mAudioData.get() + (framesToPrune * channels),
frames * channels * sizeof(AudioDataValue));
CheckedInt64 duration = FramesToUsecs(frames, mInfo.mAudioRate);
CheckedInt64 duration = FramesToUsecs(frames, mInfo.mAudio.mRate);
if (!duration.isValid()) {
return NS_ERROR_FAILURE;
}
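
Aside: the CheckedInt64 conversions in this function map between microsecond timestamps and audio frame counts. A sketch of the arithmetic, assuming the usual VideoUtils-style helpers (USECS_PER_S == 1000000); overflow in the multiply is what the isValid() checks above guard against:

// Sketch only; assumes the conventional VideoUtils definitions.
CheckedInt64 UsecsToFrames(int64_t aUsecs, uint32_t aRate) {
  return (CheckedInt64(aUsecs) * aRate) / USECS_PER_S;
}
CheckedInt64 FramesToUsecs(int64_t aFrames, uint32_t aRate) {
  return (CheckedInt64(aFrames) * USECS_PER_S) / aRate;
}
// e.g. one second of 44100 Hz audio: UsecsToFrames(1000000, 44100) -> 44100 frames.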

View File

@ -31,11 +31,8 @@ class TimeRanges;
class VideoInfo {
public:
VideoInfo()
: mAudioRate(44100),
mAudioChannels(2),
mDisplay(0,0),
: mDisplay(0,0),
mStereoMode(STEREO_MODE_MONO),
mHasAudio(false),
mHasVideo(false)
{}
@ -47,12 +44,6 @@ public:
const nsIntRect& aPicture,
const nsIntSize& aDisplay);
// Sample rate.
uint32_t mAudioRate;
// Number of audio channels.
uint32_t mAudioChannels;
// Size in pixels at which the video is rendered. This is after it has
// been scaled by its aspect ratio.
nsIntSize mDisplay;
@ -60,13 +51,49 @@ public:
// Indicates the frame layout for single track stereo videos.
StereoMode mStereoMode;
// True if we have an active audio bitstream.
bool mHasAudio;
// True if we have an active video bitstream.
bool mHasVideo;
};
class AudioInfo {
public:
AudioInfo()
: mRate(44100),
mChannels(2),
mHasAudio(false)
{}
// Sample rate.
uint32_t mRate;
// Number of audio channels.
uint32_t mChannels;
// True if we have an active audio bitstream.
bool mHasAudio;
};
class MediaInfo {
public:
bool HasVideo() const
{
return mVideo.mHasVideo;
}
bool HasAudio() const
{
return mAudio.mHasAudio;
}
bool HasValidMedia() const
{
return HasVideo() || HasAudio();
}
VideoInfo mVideo;
AudioInfo mAudio;
};
// Holds a chunk of decoded audio frames.
class AudioData {
public:
@ -448,7 +475,7 @@ public:
// the data required to present the media, and optionally fills *aTags
// with tag metadata from the file.
// Returns NS_OK on success, or NS_ERROR_FAILURE on failure.
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) = 0;
// Stores the presentation time of the first frame we'd be able to play if
@ -558,7 +585,7 @@ protected:
AbstractMediaDecoder* mDecoder;
// Stores presentation info required for playback.
VideoInfo mInfo;
MediaInfo mInfo;
// Whether we should accept media that we know we can't play
// directly, because they have a number of channels higher than
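
A hedged sketch of how a reader implements the new signature, modeled on the audio-only readers changed below (WaveReader, AppleMP3Reader); MyReader, mSampleRate and mChannels are placeholder names, not from the patch:

// Hypothetical reader, for illustration only.
nsresult MyReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags)
{
  mInfo.mAudio.mHasAudio = true;
  mInfo.mAudio.mRate = mSampleRate;
  mInfo.mAudio.mChannels = mChannels;
  // mInfo.mVideo stays default-constructed, so mInfo.HasVideo() is
  // false and mInfo.HasValidMedia() reflects the audio track alone.
  *aInfo = mInfo;
  *aTags = nullptr;
  return NS_OK;
}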

View File

@ -558,9 +558,9 @@ void MediaDecoderStateMachine::SendStreamAudio(AudioData* aAudio,
// This logic has to mimic AudioLoop closely to make sure we write
// the exact same silences
CheckedInt64 audioWrittenOffset = UsecsToFrames(mInfo.mAudioRate,
CheckedInt64 audioWrittenOffset = UsecsToFrames(mInfo.mAudio.mRate,
aStream->mInitialTime + mStartTime) + aStream->mAudioFramesWritten;
CheckedInt64 frameOffset = UsecsToFrames(mInfo.mAudioRate, aAudio->mTime);
CheckedInt64 frameOffset = UsecsToFrames(mInfo.mAudio.mRate, aAudio->mTime);
if (!audioWrittenOffset.isValid() || !frameOffset.isValid())
return;
if (audioWrittenOffset.value() < frameOffset.value()) {
@ -641,18 +641,18 @@ void MediaDecoderStateMachine::SendStreamData()
StreamTime endPosition = 0;
if (!stream->mStreamInitialized) {
if (mInfo.mHasAudio) {
if (mInfo.HasAudio()) {
AudioSegment* audio = new AudioSegment();
mediaStream->AddTrack(TRACK_AUDIO, mInfo.mAudioRate, 0, audio);
mediaStream->AddTrack(TRACK_AUDIO, mInfo.mAudio.mRate, 0, audio);
}
if (mInfo.mHasVideo) {
if (mInfo.HasVideo()) {
VideoSegment* video = new VideoSegment();
mediaStream->AddTrack(TRACK_VIDEO, RATE_VIDEO, 0, video);
}
stream->mStreamInitialized = true;
}
if (mInfo.mHasAudio) {
if (mInfo.HasAudio()) {
nsAutoTArray<AudioData*,10> audio;
// It's OK to hold references to the AudioData because while audio
// is captured, only the decoder thread pops from the queue (see below).
@ -670,10 +670,10 @@ void MediaDecoderStateMachine::SendStreamData()
}
minLastAudioPacketTime = std::min(minLastAudioPacketTime, stream->mLastAudioPacketTime);
endPosition = std::max(endPosition,
TicksToTimeRoundDown(mInfo.mAudioRate, stream->mAudioFramesWritten));
TicksToTimeRoundDown(mInfo.mAudio.mRate, stream->mAudioFramesWritten));
}
if (mInfo.mHasVideo) {
if (mInfo.HasVideo()) {
nsAutoTArray<VideoData*,10> video;
// It's OK to hold references to the VideoData because only the decoder
// thread pops from the queue.
@ -723,8 +723,8 @@ void MediaDecoderStateMachine::SendStreamData()
}
bool finished =
(!mInfo.mHasAudio || mReader->AudioQueue().IsFinished()) &&
(!mInfo.mHasVideo || mReader->VideoQueue().IsFinished());
(!mInfo.HasAudio() || mReader->AudioQueue().IsFinished()) &&
(!mInfo.HasVideo() || mReader->VideoQueue().IsFinished());
if (finished && !stream->mHaveSentFinish) {
stream->mHaveSentFinish = true;
stream->mStream->Finish();
@ -1045,8 +1045,8 @@ void MediaDecoderStateMachine::AudioLoop()
mAudioCompleted = false;
audioStartTime = mAudioStartTime;
NS_ASSERTION(audioStartTime != -1, "Should have audio start time by now");
channels = mInfo.mAudioChannels;
rate = mInfo.mAudioRate;
channels = mInfo.mAudio.mChannels;
rate = mInfo.mAudio.mRate;
audioChannelType = mDecoder->GetAudioChannelType();
volume = mVolume;
@ -1898,7 +1898,7 @@ nsresult MediaDecoderStateMachine::DecodeMetadata()
LOG(PR_LOG_DEBUG, ("%p Decoding Media Headers", mDecoder.get()));
nsresult res;
VideoInfo info;
MediaInfo info;
MetadataTags* tags;
{
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
@ -1912,7 +1912,7 @@ nsresult MediaDecoderStateMachine::DecodeMetadata()
mInfo = info;
if (NS_FAILED(res) || (!info.mHasVideo && !info.mHasAudio)) {
if (NS_FAILED(res) || (!info.HasValidMedia())) {
// Dispatch the event to call DecodeError synchronously. This ensures
// we're in shutdown state by the time we exit the decode thread.
// If we just moved to shutdown state here on the decode thread, we may
@ -1957,18 +1957,18 @@ nsresult MediaDecoderStateMachine::DecodeMetadata()
// If there is audio, let the MozAudioAvailable event manager know about
// the metadata.
if (HasAudio()) {
mEventManager.Init(mInfo.mAudioChannels, mInfo.mAudioRate);
mEventManager.Init(mInfo.mAudio.mChannels, mInfo.mAudio.mRate);
// Set the buffer length at the decoder level so that the value can be
// retrieved via the media element method. RequestFrameBufferLength will
// call MediaDecoderStateMachine::SetFrameBufferLength().
uint32_t frameBufferLength = mInfo.mAudioChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
uint32_t frameBufferLength = mInfo.mAudio.mChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
mDecoder->RequestFrameBufferLength(frameBufferLength);
}
nsCOMPtr<nsIRunnable> metadataLoadedEvent =
new AudioMetadataEventRunner(mDecoder,
mInfo.mAudioChannels,
mInfo.mAudioRate,
mInfo.mAudio.mChannels,
mInfo.mAudio.mRate,
HasAudio(),
HasVideo(),
tags);

View File

@ -228,14 +228,14 @@ public:
// The decoder monitor must be obtained before calling this.
bool HasAudio() const {
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
return mInfo.mHasAudio;
return mInfo.HasAudio();
}
// This is called on the state machine thread and audio thread.
// The decoder monitor must be obtained before calling this.
bool HasVideo() const {
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
return mInfo.mHasVideo;
return mInfo.HasVideo();
}
// Should be called by main thread.
@ -810,7 +810,7 @@ private:
// Stores presentation info required for playback. The decoder monitor
// must be held when accessing this.
VideoInfo mInfo;
MediaInfo mInfo;
mozilla::MediaMetadataManager mMetadataManager;

View File

@ -360,7 +360,7 @@ GetProperty(AudioFileStreamID aAudioFileStream,
nsresult
AppleMP3Reader::ReadMetadata(VideoInfo* aInfo,
AppleMP3Reader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread");
@ -398,9 +398,9 @@ AppleMP3Reader::ReadMetadata(VideoInfo* aInfo,
return NS_ERROR_FAILURE;
}
aInfo->mAudioRate = mAudioSampleRate;
aInfo->mAudioChannels = mAudioChannels;
aInfo->mHasAudio = mStreamReady;
aInfo->mAudio.mRate = mAudioSampleRate;
aInfo->mAudio.mChannels = mAudioChannels;
aInfo->mAudio.mHasAudio = mStreamReady;
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

View File

@ -30,7 +30,7 @@ public:
virtual bool HasAudio() MOZ_OVERRIDE;
virtual bool HasVideo() MOZ_OVERRIDE;
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) MOZ_OVERRIDE;
virtual nsresult Seek(int64_t aTime,

View File

@ -172,7 +172,7 @@ DASHReader::DecodeAudioData()
}
nsresult
DASHReader::ReadMetadata(VideoInfo* aInfo,
DASHReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -191,7 +191,7 @@ DASHReader::ReadMetadata(VideoInfo* aInfo,
*aTags = nullptr;
// Get metadata from child readers.
VideoInfo audioInfo, videoInfo;
MediaInfo audioInfo, videoInfo;
// Read metadata for all video streams.
for (uint i = 0; i < mVideoReaders.Length(); i++) {
@ -201,8 +201,7 @@ DASHReader::ReadMetadata(VideoInfo* aInfo,
NS_ENSURE_SUCCESS(rv, rv);
// Use metadata from current video sub reader to populate aInfo.
if (mVideoReaders[i] == mVideoReader) {
mInfo.mHasVideo = videoInfo.mHasVideo;
mInfo.mDisplay = videoInfo.mDisplay;
mInfo.mVideo = videoInfo.mVideo;
}
}
// Read metadata for audio stream.
@ -211,10 +210,7 @@ DASHReader::ReadMetadata(VideoInfo* aInfo,
if (mAudioReader) {
rv = mAudioReader->ReadMetadata(&audioInfo, aTags);
NS_ENSURE_SUCCESS(rv, rv);
mInfo.mHasAudio = audioInfo.mHasAudio;
mInfo.mAudioRate = audioInfo.mAudioRate;
mInfo.mAudioChannels = audioInfo.mAudioChannels;
mInfo.mStereoMode = audioInfo.mStereoMode;
mInfo.mAudio = audioInfo.mAudio;
}
*aInfo = mInfo;

View File

@ -38,7 +38,7 @@ public:
// Waits for metadata bytes to be downloaded, then reads and parses them.
// Called on the decode thread only.
nsresult ReadMetadata(VideoInfo* aInfo,
nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) MOZ_OVERRIDE;
// Waits for |ReadyToReadMetadata| or |NotifyDecoderShuttingDown|

View File

@ -73,7 +73,7 @@ static const GUID CLSID_MPEG_LAYER_3_DECODER_FILTER =
{ 0x38BE3000, 0xDBF4, 0x11D0, 0x86, 0x0E, 0x00, 0xA0, 0x24, 0xCF, 0xEF, 0x6D };
nsresult
DirectShowReader::ReadMetadata(VideoInfo* aInfo,
DirectShowReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -164,11 +164,10 @@ DirectShowReader::ReadMetadata(VideoInfo* aInfo,
mAudioSinkFilter->GetSampleSink()->GetAudioFormat(&format);
NS_ENSURE_TRUE(format.wFormatTag == WAVE_FORMAT_PCM, NS_ERROR_FAILURE);
mInfo.mAudioChannels = mNumChannels = format.nChannels;
mInfo.mAudioRate = mAudioRate = format.nSamplesPerSec;
mInfo.mAudio.mChannels = mNumChannels = format.nChannels;
mInfo.mAudio.mRate = mAudioRate = format.nSamplesPerSec;
mBytesPerSample = format.wBitsPerSample / 8;
mInfo.mHasAudio = true;
mInfo.mHasVideo = false;
mInfo.mAudio.mHasAudio = true;
*aInfo = mInfo;
// Note: The SourceFilter strips ID3v2 tags out of the stream.
@ -194,8 +193,8 @@ DirectShowReader::ReadMetadata(VideoInfo* aInfo,
LOG("Successfully initialized DirectShow MP3 decoder.");
LOG("Channels=%u Hz=%u duration=%lld bytesPerSample=%d",
mInfo.mAudioChannels,
mInfo.mAudioRate,
mInfo.mAudio.mChannels,
mInfo.mAudio.mRate,
RefTimeToUsecs(duration),
mBytesPerSample);

View File

@ -57,7 +57,7 @@ public:
bool HasAudio() MOZ_OVERRIDE;
bool HasVideo() MOZ_OVERRIDE;
nsresult ReadMetadata(VideoInfo* aInfo,
nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) MOZ_OVERRIDE;
nsresult Seek(int64_t aTime,

View File

@ -243,7 +243,7 @@ void GStreamerReader::PlayBinSourceSetup(GstAppSrc* aSource)
gst_caps_unref(caps);
}
nsresult GStreamerReader::ReadMetadata(VideoInfo* aInfo,
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -354,8 +354,8 @@ nsresult GStreamerReader::ReadMetadata(VideoInfo* aInfo,
int n_video = 0, n_audio = 0;
g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
mInfo.mHasVideo = n_video != 0;
mInfo.mHasAudio = n_audio != 0;
mInfo.mVideo.mHasVideo = n_video != 0;
mInfo.mAudio.mHasAudio = n_audio != 0;
*aInfo = mInfo;
@ -490,12 +490,12 @@ bool GStreamerReader::DecodeAudioData()
int64_t offset = GST_BUFFER_OFFSET(buffer);
unsigned int size = GST_BUFFER_SIZE(buffer);
int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudioChannels;
int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;
ssize_t outSize = static_cast<size_t>(size / sizeof(AudioDataValue));
nsAutoArrayPtr<AudioDataValue> data(new AudioDataValue[outSize]);
memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
AudioData* audio = new AudioData(offset, timestamp, duration,
frames, data.forget(), mInfo.mAudioChannels);
frames, data.forget(), mInfo.mAudio.mChannels);
mAudioQueue.Push(audio);
gst_buffer_unref(buffer);
@ -623,7 +623,7 @@ bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
/* XXX ? */
int64_t offset = 0;
VideoData* video = VideoData::Create(mInfo, image, offset,
VideoData* video = VideoData::Create(mInfo.mVideo, image, offset,
timestamp, nextTimestamp, b,
isKeyframe, -1, mPicture);
mVideoQueue.Push(video);
@ -656,7 +656,7 @@ nsresult GStreamerReader::Seek(int64_t aTarget,
nsresult GStreamerReader::GetBuffered(TimeRanges* aBuffered,
int64_t aStartTime)
{
if (!mInfo.mHasVideo && !mInfo.mHasAudio) {
if (!mInfo.HasValidMedia()) {
return NS_OK;
}
@ -933,14 +933,14 @@ void GStreamerReader::AudioPreroll()
GstPad* sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink");
GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
GstStructure* s = gst_caps_get_structure(caps, 0);
mInfo.mAudioRate = mInfo.mAudioChannels = 0;
gst_structure_get_int(s, "rate", (gint*) &mInfo.mAudioRate);
gst_structure_get_int(s, "channels", (gint*) &mInfo.mAudioChannels);
NS_ASSERTION(mInfo.mAudioRate != 0, ("audio rate is zero"));
NS_ASSERTION(mInfo.mAudioChannels != 0, ("audio channels is zero"));
NS_ASSERTION(mInfo.mAudioChannels > 0 && mInfo.mAudioChannels <= MAX_CHANNELS,
mInfo.mAudio.mRate = mInfo.mAudio.mChannels = 0;
gst_structure_get_int(s, "rate", (gint*) &mInfo.mAudio.mRate);
gst_structure_get_int(s, "channels", (gint*) &mInfo.mAudio.mChannels);
NS_ASSERTION(mInfo.mAudio.mRate != 0, ("audio rate is zero"));
NS_ASSERTION(mInfo.mAudio.mChannels != 0, ("audio channels is zero"));
NS_ASSERTION(mInfo.mAudio.mChannels > 0 && mInfo.mAudio.mChannels <= MAX_CHANNELS,
"invalid audio channels number");
mInfo.mHasAudio = true;
mInfo.mAudio.mHasAudio = true;
gst_caps_unref(caps);
gst_object_unref(sinkpad);
}
@ -955,8 +955,8 @@ void GStreamerReader::VideoPreroll()
GstStructure* structure = gst_caps_get_structure(caps, 0);
gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");
mInfo.mDisplay = nsIntSize(mPicture.width, mPicture.height);
mInfo.mHasVideo = true;
mInfo.mVideo.mDisplay = nsIntSize(mPicture.width, mPicture.height);
mInfo.mVideo.mHasVideo = true;
gst_caps_unref(caps);
gst_object_unref(sinkpad);
}

View File

@ -44,7 +44,7 @@ public:
virtual bool DecodeAudioData();
virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
int64_t aTimeThreshold);
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime,
int64_t aStartTime,
@ -53,11 +53,11 @@ public:
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
virtual bool HasAudio() {
return mInfo.mHasAudio;
return mInfo.HasAudio();
}
virtual bool HasVideo() {
return mInfo.mHasVideo;
return mInfo.HasVideo();
}
private:

View File

@ -63,15 +63,15 @@ public:
bool HasVideo() MOZ_OVERRIDE
{
return mInfo.mHasVideo;
return mInfo.HasVideo();
}
bool HasAudio() MOZ_OVERRIDE
{
return mInfo.mHasAudio;
return mInfo.HasAudio();
}
nsresult ReadMetadata(VideoInfo* aInfo, MetadataTags** aTags) MOZ_OVERRIDE;
nsresult ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) MOZ_OVERRIDE;
nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
int64_t aCurrentTime) MOZ_OVERRIDE
@ -177,7 +177,7 @@ MediaSourceDecoder::CreateSubDecoder(const nsACString& aType)
}
nsresult
MediaSourceReader::ReadMetadata(VideoInfo* aInfo, MetadataTags** aTags)
MediaSourceReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags)
{
mDecoder->SetMediaSeekable(true);
mDecoder->SetTransportSeekable(false);
@ -186,22 +186,18 @@ MediaSourceReader::ReadMetadata(VideoInfo* aInfo, MetadataTags** aTags)
const nsTArray<MediaDecoderReader*>& readers = decoder->GetReaders();
for (uint32_t i = 0; i < readers.Length(); ++i) {
MediaDecoderReader* reader = readers[i];
VideoInfo vi;
nsresult rv = reader->ReadMetadata(&vi, aTags);
MediaInfo mi;
nsresult rv = reader->ReadMetadata(&mi, aTags);
LOG(PR_LOG_DEBUG, ("ReadMetadata on SB reader %p", reader));
if (NS_FAILED(rv)) {
return rv;
}
if (vi.mHasVideo && !mInfo.mHasVideo) {
mInfo.mDisplay = vi.mDisplay;
mInfo.mStereoMode = vi.mStereoMode;
mInfo.mHasVideo = true;
if (mi.HasVideo() && !mInfo.HasVideo()) {
mInfo.mVideo = mi.mVideo;
decoder->SetVideoReader(reader);
}
if (vi.mHasAudio && !mInfo.mHasAudio) {
mInfo.mAudioRate = vi.mAudioRate;
mInfo.mAudioChannels = vi.mAudioChannels;
mInfo.mHasAudio = true;
if (mi.HasAudio() && !mInfo.HasAudio()) {
mInfo.mAudio = mi.mAudio;
decoder->SetAudioReader(reader);
}
}
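
A side benefit of the encapsulation, visible here and in the DASHReader hunks above: per-field copies give way to whole-object assignment, so a field added to AudioInfo or VideoInfo later is carried across automatically. Schematically:

// Before: copy each field by hand (easy to miss one):
//   mInfo.mAudioRate = vi.mAudioRate;
//   mInfo.mAudioChannels = vi.mAudioChannels;
//   mInfo.mHasAudio = true;
// After: one assignment copies every AudioInfo member:
mInfo.mAudio = mi.mAudio;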

View File

@ -168,8 +168,8 @@ void OggReader::BuildSerialList(nsTArray<uint32_t>& aTracks)
}
}
nsresult OggReader::ReadMetadata(VideoInfo* aInfo,
MetadataTags** aTags)
nsresult OggReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -277,8 +277,8 @@ nsresult OggReader::ReadMetadata(VideoInfo* aInfo,
mTheoraState->mInfo.frame_height);
if (VideoInfo::ValidateVideoRegion(frameSize, picture, displaySize)) {
// Video track's frame sizes will not overflow. Activate the video track.
mInfo.mHasVideo = true;
mInfo.mDisplay = displaySize;
mInfo.mVideo.mHasVideo = true;
mInfo.mVideo.mDisplay = displaySize;
mPicture = picture;
VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
@ -295,9 +295,9 @@ nsresult OggReader::ReadMetadata(VideoInfo* aInfo,
}
if (mVorbisState && ReadHeaders(mVorbisState)) {
mInfo.mHasAudio = true;
mInfo.mAudioRate = mVorbisState->mInfo.rate;
mInfo.mAudioChannels = mVorbisState->mInfo.channels > 2 ? 2 : mVorbisState->mInfo.channels;
mInfo.mAudio.mHasAudio = true;
mInfo.mAudio.mRate = mVorbisState->mInfo.rate;
mInfo.mAudio.mChannels = mVorbisState->mInfo.channels > 2 ? 2 : mVorbisState->mInfo.channels;
// Copy Vorbis info data for time computations on other threads.
memcpy(&mVorbisInfo, &mVorbisState->mInfo, sizeof(mVorbisInfo));
mVorbisInfo.codec_setup = NULL;
@ -308,9 +308,9 @@ nsresult OggReader::ReadMetadata(VideoInfo* aInfo,
}
#ifdef MOZ_OPUS
if (mOpusState && ReadHeaders(mOpusState)) {
mInfo.mHasAudio = true;
mInfo.mAudioRate = mOpusState->mRate;
mInfo.mAudioChannels = mOpusState->mChannels > 2 ? 2 : mOpusState->mChannels;
mInfo.mAudio.mHasAudio = true;
mInfo.mAudio.mRate = mOpusState->mRate;
mInfo.mAudio.mChannels = mOpusState->mChannels > 2 ? 2 : mOpusState->mChannels;
mOpusSerial = mOpusState->mSerial;
mOpusPreSkip = mOpusState->mPreSkip;
@ -799,7 +799,7 @@ nsresult OggReader::DecodeTheora(ogg_packet* aPacket, int64_t aTimeThreshold)
b.mPlanes[i].mOffset = b.mPlanes[i].mSkip = 0;
}
VideoData *v = VideoData::Create(mInfo,
VideoData *v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
mDecoder->GetResource()->Tell(),
time,
@ -1768,7 +1768,7 @@ nsresult OggReader::GetBuffered(TimeRanges* aBuffered, int64_t aStartTime)
// HasAudio and HasVideo are not used here as they take a lock and cause
// a deadlock. Accessing mInfo doesn't require a lock - it doesn't change
// after metadata is read.
if (!mInfo.mHasVideo && !mInfo.mHasAudio) {
if (!mInfo.HasValidMedia()) {
// No need to search through the file if there are no audio or video tracks
return NS_OK;
}

View File

@ -70,7 +70,7 @@ public:
return mTheoraState != 0 && mTheoraState->mActive;
}
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);

View File

@ -69,7 +69,7 @@ void MediaOmxReader::ReleaseMediaResources()
}
}
nsresult MediaOmxReader::ReadMetadata(VideoInfo* aInfo,
nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -113,8 +113,8 @@ nsresult MediaOmxReader::ReadMetadata(VideoInfo* aInfo,
}
// Video track's frame sizes will not overflow. Activate the video track.
mHasVideo = mInfo.mHasVideo = true;
mInfo.mDisplay = displaySize;
mHasVideo = mInfo.mVideo.mHasVideo = true;
mInfo.mVideo.mDisplay = displaySize;
mPicture = pictureRect;
mInitialFrame = frameSize;
VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
@ -128,9 +128,9 @@ nsresult MediaOmxReader::ReadMetadata(VideoInfo* aInfo,
if (mOmxDecoder->HasAudio()) {
int32_t numChannels, sampleRate;
mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
mHasAudio = mInfo.mHasAudio = true;
mInfo.mAudioChannels = numChannels;
mInfo.mAudioRate = sampleRate;
mHasAudio = mInfo.mAudio.mHasAudio = true;
mInfo.mAudio.mChannels = numChannels;
mInfo.mAudio.mRate = sampleRate;
}
*aInfo = mInfo;
@ -232,7 +232,7 @@ bool MediaOmxReader::DecodeVideoFrame(bool &aKeyframeSkip,
b.mPlanes[2].mOffset = frame.Cr.mOffset;
b.mPlanes[2].mSkip = frame.Cr.mSkip;
v = VideoData::Create(mInfo,
v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
pos,
frame.mTimeUs,
@ -242,7 +242,7 @@ bool MediaOmxReader::DecodeVideoFrame(bool &aKeyframeSkip,
-1,
picture);
} else {
v = VideoData::Create(mInfo,
v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
pos,
frame.mTimeUs,

View File

@ -62,7 +62,7 @@ public:
virtual bool IsDormantNeeded();
virtual void ReleaseMediaResources();
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered, int64_t aStartTime);

View File

@ -41,8 +41,8 @@ nsresult MediaPluginReader::Init(MediaDecoderReader* aCloneDonor)
return NS_OK;
}
nsresult MediaPluginReader::ReadMetadata(VideoInfo* aInfo,
MetadataTags** aTags)
nsresult MediaPluginReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -75,8 +75,8 @@ nsresult MediaPluginReader::ReadMetadata(VideoInfo* aInfo,
}
// Video track's frame sizes will not overflow. Activate the video track.
mHasVideo = mInfo.mHasVideo = true;
mInfo.mDisplay = displaySize;
mHasVideo = mInfo.mVideo.mHasVideo = true;
mInfo.mVideo.mDisplay = displaySize;
mPicture = pictureRect;
mInitialFrame = frameSize;
VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
@ -90,9 +90,9 @@ nsresult MediaPluginReader::ReadMetadata(VideoInfo* aInfo,
if (mPlugin->HasAudio(mPlugin)) {
int32_t numChannels, sampleRate;
mPlugin->GetAudioParameters(mPlugin, &numChannels, &sampleRate);
mHasAudio = mInfo.mHasAudio = true;
mInfo.mAudioChannels = numChannels;
mInfo.mAudioRate = sampleRate;
mHasAudio = mInfo.mAudio.mHasAudio = true;
mInfo.mAudio.mChannels = numChannels;
mInfo.mAudio.mRate = sampleRate;
}
*aInfo = mInfo;
@ -186,7 +186,7 @@ bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
picture.height = (frameSize.height * mPicture.height) / mInitialFrame.height;
}
v = VideoData::CreateFromImage(mInfo,
v = VideoData::CreateFromImage(mInfo.mVideo,
mDecoder->GetImageContainer(),
pos,
frame.mTimeUs,
@ -232,7 +232,7 @@ bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
}
// This is the approximate byte position in the stream.
v = VideoData::Create(mInfo,
v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
pos,
frame.mTimeUs,

View File

@ -61,7 +61,7 @@ public:
return mHasVideo;
}
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered, int64_t aStartTime);

View File

@ -35,8 +35,8 @@ nsresult RawReader::ResetDecode()
return MediaDecoderReader::ResetDecode();
}
nsresult RawReader::ReadMetadata(VideoInfo* aInfo,
MetadataTags** aTags)
nsresult RawReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(),
"Should be on decode thread.");
@ -75,9 +75,8 @@ nsresult RawReader::ReadMetadata(VideoInfo* aInfo,
return NS_ERROR_FAILURE;
}
mInfo.mHasVideo = true;
mInfo.mHasAudio = false;
mInfo.mDisplay = display;
mInfo.mVideo.mHasVideo = true;
mInfo.mVideo.mDisplay = display;
mFrameRate = static_cast<float>(mMetadata.framerateNumerator) /
mMetadata.framerateDenominator;
@ -208,7 +207,7 @@ bool RawReader::DecodeVideoFrame(bool &aKeyframeSkip,
b.mPlanes[2].mWidth = mMetadata.frameWidth / 2;
b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;
VideoData *v = VideoData::Create(mInfo,
VideoData *v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
-1,
currentFrameTime,

View File

@ -34,7 +34,7 @@ public:
return true;
}
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);

View File

@ -125,8 +125,8 @@ nsresult WaveReader::Init(MediaDecoderReader* aCloneDonor)
return NS_OK;
}
nsresult WaveReader::ReadMetadata(VideoInfo* aInfo,
MetadataTags** aTags)
nsresult WaveReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -142,10 +142,9 @@ nsresult WaveReader::ReadMetadata(VideoInfo* aInfo,
return NS_ERROR_FAILURE;
}
mInfo.mHasAudio = true;
mInfo.mHasVideo = false;
mInfo.mAudioRate = mSampleRate;
mInfo.mAudioChannels = mChannels;
mInfo.mAudio.mHasAudio = true;
mInfo.mAudio.mRate = mSampleRate;
mInfo.mAudio.mChannels = mChannels;
*aInfo = mInfo;
@ -273,7 +272,7 @@ static double RoundToUsecs(double aSeconds) {
nsresult WaveReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
{
if (!mInfo.mHasAudio) {
if (!mInfo.HasAudio()) {
return NS_OK;
}
int64_t startOffset = mDecoder->GetResource()->GetNextCachedData(mWavePCMOffset);

View File

@ -38,7 +38,7 @@ public:
return false;
}
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);

View File

@ -268,9 +268,9 @@ MediaDecodeTask::Decode()
mDecoderReader->OnDecodeThreadStart();
VideoInfo videoInfo;
MediaInfo mediaInfo;
nsAutoPtr<MetadataTags> tags;
nsresult rv = mDecoderReader->ReadMetadata(&videoInfo, getter_Transfers(tags));
nsresult rv = mDecoderReader->ReadMetadata(&mediaInfo, getter_Transfers(tags));
if (NS_FAILED(rv)) {
ReportFailureOnMainThread(WebAudioDecodeJob::InvalidContent);
return;
@ -290,8 +290,8 @@ MediaDecodeTask::Decode()
MediaQueue<AudioData>& audioQueue = mDecoderReader->AudioQueue();
uint32_t frameCount = audioQueue.FrameCount();
uint32_t channelCount = videoInfo.mAudioChannels;
uint32_t sampleRate = videoInfo.mAudioRate;
uint32_t channelCount = mediaInfo.mAudio.mChannels;
uint32_t sampleRate = mediaInfo.mAudio.mRate;
if (!frameCount || !channelCount || !sampleRate) {
ReportFailureOnMainThread(WebAudioDecodeJob::InvalidContent);

View File

@ -256,8 +256,8 @@ void WebMReader::Cleanup()
}
}
nsresult WebMReader::ReadMetadata(VideoInfo* aInfo,
MetadataTags** aTags)
nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -297,8 +297,6 @@ nsresult WebMReader::ReadMetadata(VideoInfo* aInfo,
return NS_ERROR_FAILURE;
}
mInfo.mHasAudio = false;
mInfo.mHasVideo = false;
for (uint32_t track = 0; track < ntracks; ++track) {
int id = nestegg_track_codec_id(mContext, track);
if (id == -1) {
@ -344,27 +342,27 @@ nsresult WebMReader::ReadMetadata(VideoInfo* aInfo,
mVideoTrack = track;
mHasVideo = true;
mInfo.mHasVideo = true;
mInfo.mVideo.mHasVideo = true;
mInfo.mDisplay = displaySize;
mInfo.mVideo.mDisplay = displaySize;
mPicture = pictureRect;
mInitialFrame = frameSize;
switch (params.stereo_mode) {
case NESTEGG_VIDEO_MONO:
mInfo.mStereoMode = STEREO_MODE_MONO;
mInfo.mVideo.mStereoMode = STEREO_MODE_MONO;
break;
case NESTEGG_VIDEO_STEREO_LEFT_RIGHT:
mInfo.mStereoMode = STEREO_MODE_LEFT_RIGHT;
mInfo.mVideo.mStereoMode = STEREO_MODE_LEFT_RIGHT;
break;
case NESTEGG_VIDEO_STEREO_BOTTOM_TOP:
mInfo.mStereoMode = STEREO_MODE_BOTTOM_TOP;
mInfo.mVideo.mStereoMode = STEREO_MODE_BOTTOM_TOP;
break;
case NESTEGG_VIDEO_STEREO_TOP_BOTTOM:
mInfo.mStereoMode = STEREO_MODE_TOP_BOTTOM;
mInfo.mVideo.mStereoMode = STEREO_MODE_TOP_BOTTOM;
break;
case NESTEGG_VIDEO_STEREO_RIGHT_LEFT:
mInfo.mStereoMode = STEREO_MODE_RIGHT_LEFT;
mInfo.mVideo.mStereoMode = STEREO_MODE_RIGHT_LEFT;
break;
}
}
@ -378,7 +376,7 @@ nsresult WebMReader::ReadMetadata(VideoInfo* aInfo,
mAudioTrack = track;
mHasAudio = true;
mInfo.mHasAudio = true;
mInfo.mAudio.mHasAudio = true;
// Get the Vorbis header data
unsigned int nheaders = 0;
@ -421,9 +419,9 @@ nsresult WebMReader::ReadMetadata(VideoInfo* aInfo,
return NS_ERROR_FAILURE;
}
mInfo.mAudioRate = mVorbisDsp.vi->rate;
mInfo.mAudioChannels = mVorbisDsp.vi->channels;
mChannels = mInfo.mAudioChannels;
mInfo.mAudio.mRate = mVorbisDsp.vi->rate;
mInfo.mAudio.mChannels = mVorbisDsp.vi->channels;
mChannels = mInfo.mAudio.mChannels;
}
}
@ -905,7 +903,7 @@ bool WebMReader::DecodeVideoFrame(bool &aKeyframeSkip,
picture.height = (img->d_h * mPicture.height) / mInitialFrame.height;
}
VideoData *v = VideoData::Create(mInfo,
VideoData *v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
holder->mOffset,
tstamp_usecs,

View File

@ -133,7 +133,7 @@ public:
return mHasVideo;
}
virtual nsresult ReadMetadata(VideoInfo* aInfo,
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);

View File

@ -414,7 +414,7 @@ WMFReader::ConfigureVideoFrameGeometry(IMFMediaType* aMediaType)
}
// Success! Save state.
mInfo.mDisplay = displaySize;
mInfo.mVideo.mDisplay = displaySize;
GetDefaultStride(aMediaType, &mVideoStride);
mVideoWidth = width;
mVideoHeight = height;
@ -470,7 +470,7 @@ WMFReader::ConfigureVideoDecoder()
LOG("Successfully configured video stream");
mHasVideo = mInfo.mHasVideo = true;
mHasVideo = mInfo.mVideo.mHasVideo = true;
return S_OK;
}
@ -534,9 +534,9 @@ WMFReader::ConfigureAudioDecoder()
mAudioChannels = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_NUM_CHANNELS, 0);
mAudioBytesPerSample = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_BITS_PER_SAMPLE, 16) / 8;
mInfo.mAudioChannels = mAudioChannels;
mInfo.mAudioRate = mAudioRate;
mHasAudio = mInfo.mHasAudio = true;
mInfo.mAudio.mChannels = mAudioChannels;
mInfo.mAudio.mRate = mAudioRate;
mHasAudio = mInfo.mAudio.mHasAudio = true;
LOG("Successfully configured audio stream. rate=%u channels=%u bitsPerSample=%u",
mAudioRate, mAudioChannels, mAudioBytesPerSample);
@ -545,7 +545,7 @@ WMFReader::ConfigureAudioDecoder()
}
nsresult
WMFReader::ReadMetadata(VideoInfo* aInfo,
WMFReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
@ -578,7 +578,7 @@ WMFReader::ReadMetadata(VideoInfo* aInfo,
hr = ConfigureAudioDecoder();
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
if (mUseHwAccel && mInfo.mHasVideo) {
if (mUseHwAccel && mInfo.mVideo.mHasVideo) {
RefPtr<IMFTransform> videoDecoder;
hr = mSourceReader->GetServiceForStream(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
GUID_NULL,
@ -608,12 +608,12 @@ WMFReader::ReadMetadata(VideoInfo* aInfo,
hr = ConfigureVideoDecoder();
}
}
if (mInfo.mHasVideo) {
if (mInfo.HasVideo()) {
LOG("Using DXVA: %s", (mUseHwAccel ? "Yes" : "No"));
}
// Abort if both video and audio failed to initialize.
NS_ENSURE_TRUE(mInfo.mHasAudio || mInfo.mHasVideo, NS_ERROR_FAILURE);
NS_ENSURE_TRUE(mInfo.HasValidMedia(), NS_ERROR_FAILURE);
// Get the duration, and report it to the decoder if we have it.
int64_t duration = 0;
@ -841,7 +841,7 @@ WMFReader::CreateBasicVideoFrame(IMFSample* aSample,
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 0;
VideoData *v = VideoData::Create(mInfo,
VideoData *v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
aOffsetBytes,
aTimestampUsecs,
@ -884,7 +884,7 @@ WMFReader::CreateD3DVideoFrame(IMFSample* aSample,
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
NS_ENSURE_TRUE(image, E_FAIL);
VideoData *v = VideoData::CreateFromImage(mInfo,
VideoData *v = VideoData::CreateFromImage(mInfo.mVideo,
mDecoder->GetImageContainer(),
aOffsetBytes,
aTimestampUsecs,

View File

@ -40,7 +40,7 @@ public:
bool HasAudio() MOZ_OVERRIDE;
bool HasVideo() MOZ_OVERRIDE;
nsresult ReadMetadata(VideoInfo* aInfo,
nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) MOZ_OVERRIDE;
nsresult Seek(int64_t aTime,