gecko/content/media/raw/nsRawReader.cpp
Ralph Giles f63a794f44 Bug 763010 - Expose media element metadata. r=cpearce
Implements a media.mozGetMetadata() method returning a new JavaScript object whose properties are key/value pairs representing metadata tags from the media resource. This data is available after readyState enters METADATA_LOADED.

Currently this is only implemented for Ogg Vorbis streams.

Media format metadata is parsed out by the media decoders. In the subclass implementations of nsCodecStateMachine::ReadMetadata we fill in an nsDataHashtable pointer using the format-specific API.

The hash pointer is passed up to the media element as part of the MetadataLoaded event.

The hash is deleted if the load is aborted. The audio metadata is also reset to zero (as in the constructor), resolving a todo comment.
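
Purely as an illustration (this is not the code landed for this bug), here is a sketch of how an Ogg reader's ReadMetadata might copy the Vorbis comment header into the tag hashtable, assuming nsHTMLMediaElement::MetadataTags is the nsDataHashtable of C strings described above, and using mVorbisComment as a stand-in for the decoder's vorbis_comment struct:

  nsAutoPtr<nsHTMLMediaElement::MetadataTags> tags(
    new nsHTMLMediaElement::MetadataTags);
  tags->Init();
  for (int i = 0; i < mVorbisComment.comments; ++i) {
    // Each Vorbis user comment is a "NAME=value" string.
    nsDependentCString entry(mVorbisComment.user_comments[i],
                             mVorbisComment.comment_lengths[i]);
    PRInt32 eq = entry.FindChar('=');
    if (eq > 0) {
      tags->Put(nsCString(Substring(entry, 0, eq)),
                nsCString(Substring(entry, eq + 1, entry.Length() - (eq + 1))));
    }
  }
  // Hand the hashtable to the caller; the pointer rides along with the
  // MetadataLoaded event up to the media element.
  *aTags = tags.forget();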
2012-07-30 20:14:29 -04:00


/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "nsBuiltinDecoderStateMachine.h"
#include "nsBuiltinDecoder.h"
#include "nsRawReader.h"
#include "nsRawDecoder.h"
#include "VideoUtils.h"

nsRawReader::nsRawReader(nsBuiltinDecoder* aDecoder)
  : nsBuiltinDecoderReader(aDecoder),
    mCurrentFrame(0), mFrameSize(0)
{
  MOZ_COUNT_CTOR(nsRawReader);
}

nsRawReader::~nsRawReader()
{
  MOZ_COUNT_DTOR(nsRawReader);
}

nsresult nsRawReader::Init(nsBuiltinDecoderReader* aCloneDonor)
{
  return NS_OK;
}

nsresult nsRawReader::ResetDecode()
{
  mCurrentFrame = 0;
  return nsBuiltinDecoderReader::ResetDecode();
}

nsresult nsRawReader::ReadMetadata(nsVideoInfo* aInfo,
                                   nsHTMLMediaElement::MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  if (!ReadFromResource(resource, reinterpret_cast<PRUint8*>(&mMetadata),
                        sizeof(mMetadata)))
    return NS_ERROR_FAILURE;

  // Validate the header
  if (!(mMetadata.headerPacketID == 0 /* Packet ID of 0 for the header*/ &&
        mMetadata.codecID == RAW_ID /* "YUV" */ &&
        mMetadata.majorVersion == 0 &&
        mMetadata.minorVersion == 1))
    return NS_ERROR_FAILURE;

  CheckedUint32 dummy = CheckedUint32(static_cast<PRUint32>(mMetadata.frameWidth)) *
                        static_cast<PRUint32>(mMetadata.frameHeight);
  NS_ENSURE_TRUE(dummy.isValid(), NS_ERROR_FAILURE);

  if (mMetadata.aspectDenominator == 0 ||
      mMetadata.framerateDenominator == 0)
    return NS_ERROR_FAILURE; // Invalid data

  // Determine and verify frame display size.
  float pixelAspectRatio = static_cast<float>(mMetadata.aspectNumerator) /
                           mMetadata.aspectDenominator;
  nsIntSize display(mMetadata.frameWidth, mMetadata.frameHeight);
  ScaleDisplayByAspectRatio(display, pixelAspectRatio);
  mPicture = nsIntRect(0, 0, mMetadata.frameWidth, mMetadata.frameHeight);
  nsIntSize frameSize(mMetadata.frameWidth, mMetadata.frameHeight);
  if (!nsVideoInfo::ValidateVideoRegion(frameSize, mPicture, display)) {
    // Video track's frame sizes will overflow. Fail.
    return NS_ERROR_FAILURE;
  }

  mInfo.mHasVideo = true;
  mInfo.mHasAudio = false;
  mInfo.mDisplay = display;

  mFrameRate = static_cast<float>(mMetadata.framerateNumerator) /
               mMetadata.framerateDenominator;

  // Make some sanity checks
  if (mFrameRate > 45 ||
      mFrameRate == 0 ||
      pixelAspectRatio == 0 ||
      mMetadata.frameWidth > 2000 ||
      mMetadata.frameHeight > 2000 ||
      mMetadata.chromaChannelBpp != 4 ||
      mMetadata.lumaChannelBpp != 8 ||
      mMetadata.colorspace != 1 /* 4:2:0 */)
    return NS_ERROR_FAILURE;
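
  // Each packet is a per-frame packet header followed by one frame of packed
  // 4:2:0 data: 8 bpp of luma plus 4 bpp of combined chroma.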
  mFrameSize = mMetadata.frameWidth * mMetadata.frameHeight *
    (mMetadata.lumaChannelBpp + mMetadata.chromaChannelBpp) / 8.0 +
    sizeof(nsRawPacketHeader);
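
  // If the resource length is known, derive the duration: strip the file
  // header, divide by the fixed packet size to get the frame count, and
  // scale by the frame rate into microseconds.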
  PRInt64 length = resource->GetLength();
  if (length != -1) {
    mozilla::ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
    mDecoder->GetStateMachine()->SetDuration(USECS_PER_S *
                                             (length - sizeof(nsRawVideoHeader)) /
                                             (mFrameSize * mFrameRate));
  }

  *aInfo = mInfo;

  *aTags = nullptr;

  return NS_OK;
}

bool nsRawReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
               "Should be on state machine thread or decode thread.");
  return false;
}

// Helper method that either reads until it gets aLength bytes
// or returns false
bool nsRawReader::ReadFromResource(MediaResource *aResource, PRUint8* aBuf,
                                   PRUint32 aLength)
{
  while (aLength > 0) {
    PRUint32 bytesRead = 0;
    nsresult rv;

    rv = aResource->Read(reinterpret_cast<char*>(aBuf), aLength, &bytesRead);
    NS_ENSURE_SUCCESS(rv, false);

    if (bytesRead == 0) {
      return false;
    }

    aLength -= bytesRead;
    aBuf += bytesRead;
  }

  return true;
}

bool nsRawReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                   PRInt64 aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  PRUint32 parsed = 0, decoded = 0;
  nsMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  if (!mFrameSize)
    return false; // Metadata read failed. We should refuse to play.

  PRInt64 currentFrameTime = USECS_PER_S * mCurrentFrame / mFrameRate;
  PRUint32 length = mFrameSize - sizeof(nsRawPacketHeader);

  nsAutoArrayPtr<PRUint8> buffer(new PRUint8[length]);
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  // We're always decoding one frame when called
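  // Packets before aTimeThreshold are read and discarded (counted as parsed
  // but not decoded); the loop stops at the first frame at or past the
  // threshold, which is decoded below.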
  while(true) {
    nsRawPacketHeader header;

    // Read in a packet header and validate
    if (!(ReadFromResource(resource, reinterpret_cast<PRUint8*>(&header),
                           sizeof(header))) ||
        !(header.packetID == 0xFF && header.codecID == RAW_ID /* "YUV" */)) {
      return false;
    }

    if (!ReadFromResource(resource, buffer, length)) {
      return false;
    }
    parsed++;

    if (currentFrameTime >= aTimeThreshold)
      break;

    mCurrentFrame++;
    currentFrameTime += static_cast<double>(USECS_PER_S) / mFrameRate;
  }
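
  // The packet body is planar 4:2:0: a full-resolution Y plane followed by
  // quarter-size Cb and Cr planes, all carved out of the single read buffer.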
  VideoData::YCbCrBuffer b;
  b.mPlanes[0].mData = buffer;
  b.mPlanes[0].mStride = mMetadata.frameWidth * mMetadata.lumaChannelBpp / 8.0;
  b.mPlanes[0].mHeight = mMetadata.frameHeight;
  b.mPlanes[0].mWidth = mMetadata.frameWidth;
  b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;

  PRUint32 cbcrStride = mMetadata.frameWidth * mMetadata.chromaChannelBpp / 8.0;

  b.mPlanes[1].mData = buffer + mMetadata.frameHeight * b.mPlanes[0].mStride;
  b.mPlanes[1].mStride = cbcrStride;
  b.mPlanes[1].mHeight = mMetadata.frameHeight / 2;
  b.mPlanes[1].mWidth = mMetadata.frameWidth / 2;
  b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;

  b.mPlanes[2].mData = b.mPlanes[1].mData + mMetadata.frameHeight * cbcrStride / 2;
  b.mPlanes[2].mStride = cbcrStride;
  b.mPlanes[2].mHeight = mMetadata.frameHeight / 2;
  b.mPlanes[2].mWidth = mMetadata.frameWidth / 2;
  b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;

  VideoData *v = VideoData::Create(mInfo,
                                   mDecoder->GetImageContainer(),
                                   -1,
                                   currentFrameTime,
                                   currentFrameTime + (USECS_PER_S / mFrameRate),
                                   b,
                                   1, // In raw video every frame is a keyframe
                                   -1,
                                   mPicture);
  if (!v)
    return false;

  mVideoQueue.Push(v);
  mCurrentFrame++;
  decoded++;
  currentFrameTime += USECS_PER_S / mFrameRate;

  return true;
}

nsresult nsRawReader::Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  MediaResource *resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  PRUint32 frame = mCurrentFrame;
  if (aTime >= UINT_MAX)
    return NS_ERROR_FAILURE;
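
  // Convert the target time to a frame index, then seek the resource to that
  // packet's byte offset: the file header plus frame index * packet size.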
  mCurrentFrame = aTime * mFrameRate / USECS_PER_S;

  CheckedUint32 offset = CheckedUint32(mCurrentFrame) * mFrameSize;
  offset += sizeof(nsRawVideoHeader);
  NS_ENSURE_TRUE(offset.isValid(), NS_ERROR_FAILURE);

  nsresult rv = resource->Seek(nsISeekableStream::NS_SEEK_SET, offset.value());
  NS_ENSURE_SUCCESS(rv, rv);

  mVideoQueue.Erase();
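
  // Decode forward from the seek point, dropping frames that end before the
  // target time, until a frame at or past aTime remains in the queue.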
  while(mVideoQueue.GetSize() == 0) {
    bool keyframeSkip = false;
    if (!DecodeVideoFrame(keyframeSkip, 0)) {
      mCurrentFrame = frame;
      return NS_ERROR_FAILURE;
    }

    {
      mozilla::ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
      if (mDecoder->GetDecodeState() ==
          nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
        mCurrentFrame = frame;
        return NS_ERROR_FAILURE;
      }
    }

    nsAutoPtr<VideoData> video(mVideoQueue.PeekFront());
    if (video && video->mEndTime < aTime) {
      mVideoQueue.PopFront();
      video = nullptr;
    } else {
      video.forget();
    }
  }

  return NS_OK;
}

nsresult nsRawReader::GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime)
{
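  // Buffered range reporting is not implemented for raw streams; aBuffered is
  // left unchanged.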
  return NS_OK;
}