Bug 970787 - Support YUV 4:4:4 and 4:2:2 video in VP8TrackEncoder, using libyuv for color conversion. r=rillian

This commit is contained in:
Benjamin Chen 2014-02-21 16:35:13 +08:00
parent 33f545cb75
commit c959c750ae
3 changed files with 107 additions and 15 deletions

View File

@ -9,6 +9,7 @@
#include "VideoUtils.h"
#include "prsystem.h"
#include "WebMWriter.h"
#include "libyuv.h"
namespace mozilla {
@ -24,6 +25,8 @@ PRLogModuleInfo* gVP8TrackEncoderLog;
#define DEFAULT_BITRATE 2500 // in kbit/s
#define DEFAULT_ENCODE_FRAMERATE 30
using namespace mozilla::layers;
VP8TrackEncoder::VP8TrackEncoder()
: VideoTrackEncoder()
, mEncodedFrameDuration(0)
@ -226,15 +229,15 @@ void VP8TrackEncoder::PrepareMutedFrame()
CreateMutedFrame(&mMuteFrame);
}
uint32_t yPlanSize = mFrameWidth * mFrameHeight;
uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
uint32_t halfWidth = (mFrameWidth + 1) / 2;
uint32_t halfHeight = (mFrameHeight + 1) / 2;
uint32_t uvPlanSize = halfWidth * halfHeight;
uint32_t uvPlaneSize = halfWidth * halfHeight;
MOZ_ASSERT(mMuteFrame.Length() >= (yPlanSize + uvPlanSize));
MOZ_ASSERT(mMuteFrame.Length() >= (yPlaneSize + uvPlaneSize * 2));
uint8_t *y = mMuteFrame.Elements();
uint8_t *cb = mMuteFrame.Elements() + yPlanSize;
uint8_t *cr = mMuteFrame.Elements() + yPlanSize + uvPlanSize;
uint8_t *cb = mMuteFrame.Elements() + yPlaneSize;
uint8_t *cr = mMuteFrame.Elements() + yPlaneSize + uvPlaneSize;
mVPXImageWrapper->planes[PLANE_Y] = y;
mVPXImageWrapper->planes[PLANE_U] = cb;
@ -244,12 +247,37 @@ void VP8TrackEncoder::PrepareMutedFrame()
mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
}
// Returns true when the image is 4:2:0 planar: each chroma plane is
// subsampled by two in both dimensions. For odd luma dimensions the chroma
// planes are ceil(width/2) x ceil(height/2), so compare against the
// rounded-up half size instead of requiring the luma size to be exactly
// double the chroma size (the old `mYSize == mCbCrSize * 2` test wrongly
// rejected frames such as 641x480, where chroma is 321x240).
static bool isYUV420(const PlanarYCbCrImage::Data *aData)
{
  return aData->mCbCrSize.width == (aData->mYSize.width + 1) / 2 &&
         aData->mCbCrSize.height == (aData->mYSize.height + 1) / 2;
}
// Returns true when the image is 4:2:2 planar: chroma is subsampled by two
// horizontally only. For an odd luma width the chroma width is
// ceil(width/2), so use the rounded-up half width rather than requiring the
// luma width to be exactly double the chroma width (which would reject
// odd-width 4:2:2 frames).
static bool isYUV422(const PlanarYCbCrImage::Data *aData)
{
  return aData->mCbCrSize.width == (aData->mYSize.width + 1) / 2 &&
         aData->mCbCrSize.height == aData->mYSize.height;
}
// Returns true when the image is 4:4:4 planar: the chroma planes have
// exactly the same dimensions as the luma plane (no subsampling).
static bool isYUV444(const PlanarYCbCrImage::Data *aData)
{
  return aData->mYSize == aData->mCbCrSize;
}
// Prepares one video chunk for libvpx by pointing mVPXImageWrapper at I420
// plane data, converting NV12/NV21, 4:4:4, and 4:2:2 input to I420 via
// libyuv when the source is not already 4:2:0 planar.
// Returns NS_OK on success, NS_ERROR_NOT_IMPLEMENTED for planar layouts we
// cannot convert.
// NOTE(review): this span is a rendered diff. It interleaves pre- and
// post-patch lines (the duplicated `img`/`yuv`/`data` declarations below)
// and contains an embedded hunk-header line, so it is not compilable as
// shown; the second of each duplicated pair is the post-patch line.
nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
{
// Null or forced-black chunks are encoded from the cached muted frame.
if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
PrepareMutedFrame();
} else {
layers::Image* img = aChunk.mFrame.GetImage();
Image* img = aChunk.mFrame.GetImage();
ImageFormat format = img->GetFormat();
// Only planar YCbCr images are handled by this encoder path.
if (format != ImageFormat::PLANAR_YCBCR) {
VP8LOG("Unsupported video format\n");
@ -257,19 +285,79 @@ nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
}
// Cast away constness b/c some of the accessors are non-const
layers::PlanarYCbCrImage* yuv =
const_cast<layers::PlanarYCbCrImage *>(static_cast<const layers::PlanarYCbCrImage *>(img));
PlanarYCbCrImage* yuv =
const_cast<PlanarYCbCrImage *>(static_cast<const PlanarYCbCrImage *>(img));
// Big-time assumption here that this is all contiguous data coming
// from getUserMedia or other sources.
MOZ_ASSERT(yuv);
const layers::PlanarYCbCrImage::Data *data = yuv->GetData();
const PlanarYCbCrImage::Data *data = yuv->GetData();
// Fast path: already I420 (4:2:0 planar, no interleaved chroma) — hand
// the source planes to libvpx directly, no copy or conversion.
if (isYUV420(data) && !data->mCbSkip) { // 420 planar
mVPXImageWrapper->planes[PLANE_Y] = data->mYChannel;
mVPXImageWrapper->planes[PLANE_U] = data->mCbChannel;
mVPXImageWrapper->planes[PLANE_V] = data->mCrChannel;
mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;
} else {
// Conversion path: build an I420 copy in mI420Frame.
// Chroma planes are rounded-up half size so odd dimensions still fit.
uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
uint32_t halfWidth = (mFrameWidth + 1) / 2;
uint32_t halfHeight = (mFrameHeight + 1) / 2;
uint32_t uvPlaneSize = halfWidth * halfHeight;
// Lazily allocate the conversion buffer once and reuse it.
// NOTE(review): sized only when empty — assumes the frame dimensions
// never change for the lifetime of this encoder; verify against callers.
if (mI420Frame.IsEmpty()) {
mI420Frame.SetLength(yPlaneSize + uvPlaneSize * 2);
}
MOZ_ASSERT(mI420Frame.Length() >= (yPlaneSize + uvPlaneSize * 2));
// Y, Cb, Cr planes are laid out contiguously in the buffer.
uint8_t *y = mI420Frame.Elements();
uint8_t *cb = mI420Frame.Elements() + yPlaneSize;
uint8_t *cr = mI420Frame.Elements() + yPlaneSize + uvPlaneSize;
if (isYUV420(data) && data->mCbSkip) {
// If mCbSkip is set, we assume it's nv12 or nv21.
// Channel pointer order distinguishes the two: Cb first means NV12
// (interleaved CbCr), Cr first means NV21 (interleaved CrCb).
if (data->mCbChannel < data->mCrChannel) { // nv12
libyuv::NV12ToI420(data->mYChannel, data->mYStride,
data->mCbChannel, data->mCbCrStride,
y, mFrameWidth,
cb, halfWidth,
cr, halfWidth,
mFrameWidth, mFrameHeight);
} else { // nv21
libyuv::NV21ToI420(data->mYChannel, data->mYStride,
data->mCrChannel, data->mCbCrStride,
y, mFrameWidth,
cb, halfWidth,
cr, halfWidth,
mFrameWidth, mFrameHeight);
}
} else if (isYUV444(data) && !data->mCbSkip) {
// 4:4:4 planar: downsample chroma in both dimensions.
libyuv::I444ToI420(data->mYChannel, data->mYStride,
data->mCbChannel, data->mCbCrStride,
data->mCrChannel, data->mCbCrStride,
y, mFrameWidth,
cb, halfWidth,
cr, halfWidth,
mFrameWidth, mFrameHeight);
} else if (isYUV422(data) && !data->mCbSkip) {
// 4:2:2 planar: downsample chroma vertically.
libyuv::I422ToI420(data->mYChannel, data->mYStride,
data->mCbChannel, data->mCbCrStride,
data->mCrChannel, data->mCbCrStride,
y, mFrameWidth,
cb, halfWidth,
cr, halfWidth,
mFrameWidth, mFrameHeight);
} else {
// Any layout not matched by the classifiers above is rejected.
VP8LOG("Unsupported planar format\n");
return NS_ERROR_NOT_IMPLEMENTED;
}
// Point the vpx image wrapper at the freshly converted I420 buffer.
mVPXImageWrapper->planes[PLANE_Y] = y;
mVPXImageWrapper->planes[PLANE_U] = cb;
mVPXImageWrapper->planes[PLANE_V] = cr;
mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
}
}
return NS_OK;
}

View File

@ -74,6 +74,9 @@ private:
// Muted frame, we only create it once.
nsTArray<uint8_t> mMuteFrame;
// I420 frame buffer, used when converting 4:4:4 or 4:2:2 input to I420.
nsTArray<uint8_t> mI420Frame;
/**
* A local segment queue which takes the raw data out from mRawSegment in the
* call of GetEncodedTrack(). Since we implement the fixed FPS encoding

View File

@ -35,6 +35,7 @@ if CONFIG['MOZ_WEBM_ENCODER']:
UNIFIED_SOURCES += ['VorbisTrackEncoder.cpp',
'VP8TrackEncoder.cpp',
]
LOCAL_INCLUDES += ['/media/libyuv/include']
FAIL_ON_WARNINGS = True