Bug 803394 - Provide a callback to media plugins to request video buffers owned by gecko r=doublec

Edwin Flores 2012-12-13 11:53:58 +13:00
parent 283f1651ad
commit 71779c6bff
6 changed files with 180 additions and 49 deletions


@@ -240,6 +240,26 @@ VideoData* VideoData::Create(VideoInfo& aInfo,
return v.forget();
}
VideoData* VideoData::CreateFromImage(VideoInfo& aInfo,
ImageContainer* aContainer,
int64_t aOffset,
int64_t aTime,
int64_t aEndTime,
const nsRefPtr<Image>& aImage,
bool aKeyframe,
int64_t aTimecode,
nsIntRect aPicture)
{
nsAutoPtr<VideoData> v(new VideoData(aOffset,
aTime,
aEndTime,
aKeyframe,
aTimecode,
aInfo.mDisplay));
v->mImage = aImage;
return v.forget();
}
#ifdef MOZ_WIDGET_GONK
VideoData* VideoData::Create(VideoInfo& aInfo,
ImageContainer* aContainer,
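
The new CreateFromImage overload adopts an Image that has already been populated, so no plane copy happens at this point; the existing Create overload, by contrast, copies a YCbCrBuffer into a freshly allocated Image. A minimal sketch of the intended call pattern (variable names hypothetical):

// Hypothetical caller: wrap an already-decoded image without copying planes.
nsRefPtr<Image> image = bufferCallback.GetImage(); // filled by the plugin
nsAutoPtr<VideoData> v(VideoData::CreateFromImage(info, imageContainer,
                                                  streamPos, timeUs,
                                                  timeUs + 1, // end time unknown
                                                  image, isKeyframe,
                                                  -1, // no timecode
                                                  picture));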


@@ -158,6 +158,16 @@ public:
int64_t aTimecode,
nsIntRect aPicture);
static VideoData* CreateFromImage(VideoInfo& aInfo,
ImageContainer* aContainer,
int64_t aOffset,
int64_t aTime,
int64_t aEndTime,
const nsRefPtr<Image>& aImage,
bool aKeyframe,
int64_t aTimecode,
nsIntRect aPicture);
// Constructs a duplicate VideoData object. This intrinsically tells the
// player that it does not need to update the displayed frame when this
// frame is played; this frame is identical to the previous.


@@ -10,6 +10,21 @@
namespace MPAPI {
enum ColorFormat {
YCbCr,
RGB565,
};
/*
* A callback for the plugin to use to request a buffer owned by Gecko. This can
* save us a copy or two down the line.
*/
class BufferCallback {
public:
virtual void *operator()(size_t aWidth, size_t aHeight,
ColorFormat aColorFormat) = 0;
};
struct VideoPlane {
void *mData;
int32_t mStride;
@@ -114,7 +129,7 @@ struct Decoder {
void (*GetAudioParameters)(Decoder *aDecoder, int32_t *aNumChannels, int32_t *aSampleRate);
bool (*HasVideo)(Decoder *aDecoder);
bool (*HasAudio)(Decoder *aDecoder);
bool (*ReadVideo)(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs);
bool (*ReadVideo)(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback);
bool (*ReadAudio)(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs);
void (*DestroyDecoder)(Decoder *);
};
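
On the plugin side the intended contract is: call the functor to get a Gecko-owned destination buffer for the frame, and write the decoded pixels straight into it; if the functor returns nullptr, fall back to plugin-owned memory and let the host take the copying path. A rough sketch of that pattern (the helper name and pre-decoded source buffer are hypothetical; a real decoder would have the codec write into the returned buffer directly):

#include <stdint.h>
#include <string.h>
#include "MPAPI.h"

// Hypothetical plugin-side output step for an RGB565 frame.
static bool OutputFrame(MPAPI::BufferCallback *aBufferCallback,
                        const uint16_t *aPixels,
                        size_t aWidth, size_t aHeight)
{
  void *dest = (*aBufferCallback)(aWidth, aHeight, MPAPI::RGB565);
  if (!dest) {
    return false; // host supplied no buffer; caller keeps its own allocation
  }
  // RGB565 is two bytes per pixel; assumes the host buffer is tightly packed.
  memcpy(dest, aPixels, aWidth * aHeight * 2);
  return true;
}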


@@ -11,10 +11,13 @@
#include "MediaPluginDecoder.h"
#include "MediaPluginHost.h"
#include "MediaDecoderStateMachine.h"
#include "ImageContainer.h"
#include "AbstractMediaDecoder.h"
namespace mozilla {
typedef mozilla::layers::Image Image;
MediaPluginReader::MediaPluginReader(AbstractMediaDecoder *aDecoder,
const nsACString& aContentType) :
MediaDecoderReader(aDecoder),
@@ -126,10 +129,13 @@ bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
mLastVideoFrame = NULL;
}
ImageBufferCallback bufferCallback(mDecoder->GetImageContainer());
nsRefPtr<Image> currentImage;
// Read next frame
while (true) {
MPAPI::VideoFrame frame;
if (!mPlugin->ReadVideo(mPlugin, &frame, mVideoSeekTimeUs)) {
if (!mPlugin->ReadVideo(mPlugin, &frame, mVideoSeekTimeUs, &bufferCallback)) {
// We reached the end of the video stream. If we have a buffered
// video frame, push it onto the video queue using the total duration
// of the video as the end time.
@@ -163,54 +169,81 @@ bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
if (frame.mSize == 0)
return true;
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
b.mPlanes[0].mStride = frame.Y.mStride;
b.mPlanes[0].mHeight = frame.Y.mHeight;
b.mPlanes[0].mWidth = frame.Y.mWidth;
b.mPlanes[0].mOffset = frame.Y.mOffset;
b.mPlanes[0].mSkip = frame.Y.mSkip;
b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
b.mPlanes[1].mStride = frame.Cb.mStride;
b.mPlanes[1].mHeight = frame.Cb.mHeight;
b.mPlanes[1].mWidth = frame.Cb.mWidth;
b.mPlanes[1].mOffset = frame.Cb.mOffset;
b.mPlanes[1].mSkip = frame.Cb.mSkip;
b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
b.mPlanes[2].mStride = frame.Cr.mStride;
b.mPlanes[2].mHeight = frame.Cr.mHeight;
b.mPlanes[2].mWidth = frame.Cr.mWidth;
b.mPlanes[2].mOffset = frame.Cr.mOffset;
b.mPlanes[2].mSkip = frame.Cr.mSkip;
nsIntRect picture = mPicture;
if (frame.Y.mWidth != mInitialFrame.width ||
frame.Y.mHeight != mInitialFrame.height) {
// Frame size is different from what the container reports. This is legal,
// and we will preserve the ratio of the crop rectangle as it
// was reported relative to the picture size reported by the container.
picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
}
// This is the approximate byte position in the stream.
currentImage = bufferCallback.GetImage();
int64_t pos = mDecoder->GetResource()->Tell();
nsIntRect picture = mPicture;
VideoData *v;
if (currentImage) {
gfxIntSize frameSize = currentImage->GetSize();
if (frameSize.width != mInitialFrame.width ||
frameSize.height != mInitialFrame.height) {
// Frame size is different from what the container reports. This is legal,
// and we will preserve the ratio of the crop rectangle as it
// was reported relative to the picture size reported by the container.
picture.x = (mPicture.x * frameSize.width) / mInitialFrame.width;
picture.y = (mPicture.y * frameSize.height) / mInitialFrame.height;
picture.width = (frameSize.width * mPicture.width) / mInitialFrame.width;
picture.height = (frameSize.height * mPicture.height) / mInitialFrame.height;
}
VideoData *v = VideoData::Create(mInfo,
v = VideoData::CreateFromImage(mInfo,
mDecoder->GetImageContainer(),
pos,
frame.mTimeUs,
frame.mTimeUs+1, // We don't know the end time.
b,
currentImage,
frame.mKeyFrame,
-1,
picture);
} else {
// Assume YUV
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
b.mPlanes[0].mStride = frame.Y.mStride;
b.mPlanes[0].mHeight = frame.Y.mHeight;
b.mPlanes[0].mWidth = frame.Y.mWidth;
b.mPlanes[0].mOffset = frame.Y.mOffset;
b.mPlanes[0].mSkip = frame.Y.mSkip;
b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
b.mPlanes[1].mStride = frame.Cb.mStride;
b.mPlanes[1].mHeight = frame.Cb.mHeight;
b.mPlanes[1].mWidth = frame.Cb.mWidth;
b.mPlanes[1].mOffset = frame.Cb.mOffset;
b.mPlanes[1].mSkip = frame.Cb.mSkip;
b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
b.mPlanes[2].mStride = frame.Cr.mStride;
b.mPlanes[2].mHeight = frame.Cr.mHeight;
b.mPlanes[2].mWidth = frame.Cr.mWidth;
b.mPlanes[2].mOffset = frame.Cr.mOffset;
b.mPlanes[2].mSkip = frame.Cr.mSkip;
if (frame.Y.mWidth != mInitialFrame.width ||
frame.Y.mHeight != mInitialFrame.height) {
// Frame size is different from what the container reports. This is legal,
// and we will preserve the ratio of the crop rectangle as it
// was reported relative to the picture size reported by the container.
picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
}
// This is the approximate byte position in the stream.
v = VideoData::Create(mInfo,
mDecoder->GetImageContainer(),
pos,
frame.mTimeUs,
frame.mTimeUs+1, // We don't know the end time.
b,
frame.mKeyFrame,
-1,
picture);
}
if (!v) {
return false;
}
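
The proportional rescaling of the crop rectangle, applied identically on both branches above, keeps the picture region consistent when the decoder's output size differs from the size the container declared. A standalone sketch of the same integer arithmetic, with made-up dimensions:

#include <cstdio>

int main()
{
  // Hypothetical sizes: the container declared 320x240 frames with an
  // (8,8) 304x224 crop, but the decoder actually emits 640x480 frames.
  int initW = 320, initH = 240;                       // mInitialFrame
  int cropX = 8, cropY = 8, cropW = 304, cropH = 224; // mPicture
  int frameW = 640, frameH = 480;                     // decoded frame size

  int x = (cropX * frameW) / initW; // 16
  int y = (cropY * frameH) / initH; // 16
  int w = (frameW * cropW) / initW; // 608
  int h = (frameH * cropH) / initH; // 448
  printf("scaled crop: %d,%d %dx%d\n", x, y, w, h);
  return 0;
}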
@@ -313,4 +346,39 @@ nsresult MediaPluginReader::GetBuffered(nsTimeRanges* aBuffered, int64_t aStartT
return NS_OK;
}
MediaPluginReader::ImageBufferCallback::ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer) :
mImageContainer(aImageContainer)
{
}
void *
MediaPluginReader::ImageBufferCallback::operator()(size_t aWidth, size_t aHeight,
MPAPI::ColorFormat aColorFormat)
{
if (!mImageContainer) {
NS_WARNING("No image container to construct an image");
return nullptr;
}
nsRefPtr<mozilla::layers::SharedRGBImage> rgbImage;
switch(aColorFormat) {
case MPAPI::RGB565:
rgbImage = mozilla::layers::SharedRGBImage::Create(mImageContainer,
nsIntSize(aWidth, aHeight),
gfxASurface::ImageFormatRGB16_565);
mImage = rgbImage;
return rgbImage->GetBuffer();
case MPAPI::YCbCr:
default:
NS_NOTREACHED("Color format not supported");
return nullptr;
}
}
already_AddRefed<Image>
MediaPluginReader::ImageBufferCallback::GetImage()
{
return mImage.forget();
}
} // namespace mozilla
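
Tying the pieces together: DecodeVideoFrame hands the plugin a stack-allocated ImageBufferCallback; when the plugin used it, GetImage() hands back the SharedRGBImage that already holds the decoded RGB565 pixels, and the reader wraps it via VideoData::CreateFromImage rather than copying planes. GetImage() uses forget() so ownership of the one-shot image transfers to the caller and the callback is clear for the next frame. A condensed sketch of that host-side flow (surrounding reader state elided):

// Condensed host-side flow, mirroring DecodeVideoFrame above.
ImageBufferCallback bufferCallback(mDecoder->GetImageContainer());
MPAPI::VideoFrame frame;
if (mPlugin->ReadVideo(mPlugin, &frame, mVideoSeekTimeUs, &bufferCallback)) {
  nsRefPtr<Image> image = bufferCallback.GetImage();
  if (image) {
    // Zero-copy path: wrap the already-filled image.
  } else {
    // Plugin ignored the callback; fall back to the YCbCrBuffer copy path.
  }
}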


@@ -8,7 +8,9 @@
#include "MediaResource.h"
#include "MediaDecoderReader.h"
#include "ImageContainer.h"
#include "mozilla/layers/SharedRGBImage.h"
#include "MPAPI.h"
class nsACString;
@@ -17,6 +19,10 @@ namespace mozilla {
class AbstractMediaDecoder;
namespace layers {
class ImageContainer;
}
class MediaPluginReader : public MediaDecoderReader
{
nsCString mType;
@@ -54,6 +60,17 @@ public:
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime);
class ImageBufferCallback : public MPAPI::BufferCallback {
typedef mozilla::layers::Image Image;
public:
ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer);
void *operator()(size_t aWidth, size_t aHeight,
MPAPI::ColorFormat aColorFormat);
already_AddRefed<Image> GetImage();
private:
mozilla::layers::ImageContainer *mImageContainer;
nsRefPtr<Image> mImage;
};
};
} // namespace mozilla


@@ -143,7 +143,7 @@ class OmxDecoder {
void ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
void ToVideoFrame_YUV420PackedSemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
void ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
int32_t aAudioChannels, int32_t aAudioSampleRate);
public:
@@ -176,7 +176,7 @@ public:
return mAudioSource != NULL;
}
bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs);
bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback);
bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);
};
@@ -657,7 +657,7 @@ void OmxDecoder::ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, i
uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1);
}
bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
switch (mVideoColorFormat) {
case OMX_COLOR_FormatYUV420Planar: // e.g. Asus Transformer, Stagefright's software decoder
ToVideoFrame_YUV420Planar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
@@ -690,7 +690,8 @@ bool OmxDecoder::ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData,
return true;
}
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
BufferCallback *aBufferCallback)
{
MOZ_ASSERT(aSeekTimeUs >= -1);
@@ -730,7 +731,7 @@ bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
size_t length = mVideoBuffer->range_length();
if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame, aBufferCallback)) {
return false;
}
}
@@ -740,7 +741,7 @@ bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
if (!SetVideoFormat())
return false;
else
return ReadVideo(aFrame, aSeekTimeUs);
return ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
}
else if (err == ERROR_END_OF_STREAM) {
LOG("mVideoSource END_OF_STREAM");
@@ -835,9 +836,9 @@ static bool HasAudio(Decoder *aDecoder) {
return cast(aDecoder)->HasAudio();
}
static bool ReadVideo(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs)
static bool ReadVideo(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback)
{
return cast(aDecoder)->ReadVideo(aFrame, aSeekTimeUs);
return cast(aDecoder)->ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
}
static bool ReadAudio(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs)