Bug 1043274 - Use GraphicBuffer on GonkDecoderModule. r=edwin, r=sotaro

This commit is contained in:
Blake 2014-11-12 17:34:21 +08:00
parent a37f273a14
commit 2446097203
5 changed files with 211 additions and 78 deletions

View File

@ -205,12 +205,8 @@ GonkAudioDecoderManager::Output(int64_t aStreamOffset,
// Returns the currently-held decoded audio buffer to the codec and drops
// our reference to it.
// NOTE(review): this span comes from a diff rendering and appears to
// interleave the pre-change path (kKeyBufferIndex lookup + release() +
// releaseOutputBuffer()) with the post-change single call to
// mDecoder->ReleaseMediaBuffer() — confirm against the applied revision.
void GonkAudioDecoderManager::ReleaseAudioBuffer() {
if (mAudioBuffer) {
// Recover the codec output-buffer index stashed in the buffer's metadata.
sp<MetaData> metaData = mAudioBuffer->meta_data();
int32_t index;
metaData->findInt32(android::MediaCodecProxy::kKeyBufferIndex, &index);
mAudioBuffer->release();
// Post-change path: the proxy handles buffer release internally.
mDecoder->ReleaseMediaBuffer(mAudioBuffer);
mAudioBuffer = nullptr;
mDecoder->releaseOutputBuffer(index);
}
}

View File

@ -22,6 +22,10 @@
#include <stagefright/foundation/AString.h>
#include <stagefright/foundation/ALooper.h>
#include "mp4_demuxer/AnnexB.h"
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/layers/GrallocTextureClient.h"
#include "mozilla/layers/TextureClient.h"
#define READ_OUTPUT_BUFFER_TIMEOUT_US 3000
@ -46,12 +50,14 @@ enum {
};
GonkVideoDecoderManager::GonkVideoDecoderManager(
mozilla::layers::ImageContainer* aImageContainer,
const mp4_demuxer::VideoDecoderConfig& aConfig)
mozilla::layers::ImageContainer* aImageContainer,
const mp4_demuxer::VideoDecoderConfig& aConfig)
: mImageContainer(aImageContainer)
, mConfig(aConfig)
, mReaderCallback(nullptr)
, mColorConverterBufferSize(0)
, mNativeWindow(nullptr)
, mPendingVideoBuffersLock("GonkVideoDecoderManager::mPendingVideoBuffersLock")
{
NS_ASSERTION(!NS_IsMainThread(), "Should not be on main thread.");
MOZ_ASSERT(mImageContainer);
@ -98,14 +104,21 @@ GonkVideoDecoderManager::Init(MediaDataDecoderCallback* aCallback)
}
// Create ALooper
mLooper = new ALooper;
mLooper->setName("GonkVideoDecoderManager");
mManagerLooper = new ALooper;
mManagerLooper->setName("GonkVideoDecoderManager");
// Register AMessage handler to ALooper.
mLooper->registerHandler(mHandler);
mManagerLooper->registerHandler(mHandler);
// Start ALooper thread.
if (mLooper->start() != OK) {
if (mLooper->start() != OK || mManagerLooper->start() != OK ) {
return nullptr;
}
mDecoder = MediaCodecProxy::CreateByType(mLooper, "video/avc", false, true, mVideoListener);
uint32_t capability = MediaCodecProxy::kEmptyCapability;
if (mDecoder->getCapability(&capability) == OK && (capability &
MediaCodecProxy::kCanExposeGraphicBuffer)) {
mNativeWindow = new GonkNativeWindow();
}
return mDecoder;
}
@ -116,7 +129,7 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
int64_t timeUs;
int32_t keyFrame;
if (!(mVideoBuffer != nullptr && mVideoBuffer->data() != nullptr)) {
if (mVideoBuffer == nullptr) {
ALOG("Video Buffer is not valid!");
return NS_ERROR_UNEXPECTED;
}
@ -150,70 +163,97 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
}
uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
int32_t stride = mFrameInfo.mStride;
int32_t slice_height = mFrameInfo.mSliceHeight;
RefPtr<mozilla::layers::TextureClient> textureClient;
// Converts to OMX_COLOR_FormatYUV420Planar
if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
ARect crop;
crop.top = 0;
crop.bottom = mFrameInfo.mHeight;
crop.left = 0;
crop.right = mFrameInfo.mWidth;
yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
ReleaseVideoBuffer();
ALOG("Color conversion failed!");
return NS_ERROR_UNEXPECTED;
}
stride = mFrameInfo.mWidth;
slice_height = mFrameInfo.mHeight;
if ((mVideoBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
}
size_t yuv420p_y_size = stride * slice_height;
size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
uint8_t *yuv420p_y = yuv420p_buffer;
uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;
if (textureClient) {
GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
grallocClient->SetMediaBuffer(mVideoBuffer);
textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
// This is the approximate byte position in the stream.
int64_t pos = aStreamOffset;
*v = VideoData::Create(mInfo.mVideo,
mImageContainer,
aStreamOffset,
timeUs,
1, // We don't know the duration.
textureClient,
keyFrame,
-1,
picture);
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = yuv420p_y;
b.mPlanes[0].mWidth = mFrameInfo.mWidth;
b.mPlanes[0].mHeight = mFrameInfo.mHeight;
b.mPlanes[0].mStride = stride;
b.mPlanes[0].mOffset = 0;
b.mPlanes[0].mSkip = 0;
} else {
if (!mVideoBuffer->data()) {
ALOG("No data in Video Buffer!");
return NS_ERROR_UNEXPECTED;
}
uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
int32_t stride = mFrameInfo.mStride;
int32_t slice_height = mFrameInfo.mSliceHeight;
b.mPlanes[1].mData = yuv420p_u;
b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
b.mPlanes[1].mStride = (stride + 1) / 2;
b.mPlanes[1].mOffset = 0;
b.mPlanes[1].mSkip = 0;
// Converts to OMX_COLOR_FormatYUV420Planar
if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
ARect crop;
crop.top = 0;
crop.bottom = mFrameInfo.mHeight;
crop.left = 0;
crop.right = mFrameInfo.mWidth;
yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
ReleaseVideoBuffer();
ALOG("Color conversion failed!");
return NS_ERROR_UNEXPECTED;
}
stride = mFrameInfo.mWidth;
slice_height = mFrameInfo.mHeight;
}
b.mPlanes[2].mData = yuv420p_v;
b.mPlanes[2].mWidth =(mFrameInfo.mWidth + 1) / 2;
b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
b.mPlanes[2].mStride = (stride + 1) / 2;
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 0;
size_t yuv420p_y_size = stride * slice_height;
size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
uint8_t *yuv420p_y = yuv420p_buffer;
uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;
*v = VideoData::Create(
mInfo.mVideo,
mImageContainer,
pos,
timeUs,
1, // We don't know the duration.
b,
keyFrame,
-1,
picture);
ReleaseVideoBuffer();
// This is the approximate byte position in the stream.
int64_t pos = aStreamOffset;
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = yuv420p_y;
b.mPlanes[0].mWidth = mFrameInfo.mWidth;
b.mPlanes[0].mHeight = mFrameInfo.mHeight;
b.mPlanes[0].mStride = stride;
b.mPlanes[0].mOffset = 0;
b.mPlanes[0].mSkip = 0;
b.mPlanes[1].mData = yuv420p_u;
b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
b.mPlanes[1].mStride = (stride + 1) / 2;
b.mPlanes[1].mOffset = 0;
b.mPlanes[1].mSkip = 0;
b.mPlanes[2].mData = yuv420p_v;
b.mPlanes[2].mWidth =(mFrameInfo.mWidth + 1) / 2;
b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
b.mPlanes[2].mStride = (stride + 1) / 2;
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 0;
*v = VideoData::Create(
mInfo.mVideo,
mImageContainer,
pos,
timeUs,
1, // We don't know the duration.
b,
keyFrame,
-1,
picture);
ReleaseVideoBuffer();
}
return NS_OK;
}
@ -334,12 +374,8 @@ GonkVideoDecoderManager::Output(int64_t aStreamOffset,
// Returns the currently-held decoded video buffer to the codec and drops
// our reference to it.
// NOTE(review): this span comes from a diff rendering and appears to
// interleave the pre-change path (kKeyBufferIndex lookup + release() +
// releaseOutputBuffer()) with the post-change single call to
// mDecoder->ReleaseMediaBuffer() — confirm against the applied revision.
void GonkVideoDecoderManager::ReleaseVideoBuffer() {
if (mVideoBuffer) {
// Recover the codec output-buffer index stashed in the buffer's metadata.
sp<MetaData> metaData = mVideoBuffer->meta_data();
int32_t index;
metaData->findInt32(android::MediaCodecProxy::kKeyBufferIndex, &index);
mVideoBuffer->release();
// Post-change path: the proxy handles buffer release internally.
mDecoder->ReleaseMediaBuffer(mVideoBuffer);
mVideoBuffer = nullptr;
mDecoder->releaseOutputBuffer(index);
}
}
@ -371,12 +407,16 @@ void
GonkVideoDecoderManager::codecReserved()
{
sp<AMessage> format = new AMessage;
sp<Surface> surface;
// Fixed values
format->setString("mime", "video/avc");
format->setInt32("width", mVideoWidth);
format->setInt32("height", mVideoHeight);
mDecoder->configure(format, nullptr, nullptr, 0);
if (mNativeWindow != nullptr) {
surface = new Surface(mNativeWindow->getBufferQueue());
}
status_t err = mDecoder->configure(format, surface, nullptr, 0);
mDecoder->Prepare();
SetVideoFormat();
@ -399,7 +439,7 @@ GonkVideoDecoderManager::codecCanceled()
}
// Called on GonkVideoDecoderManager::mLooper thread.
// Called on GonkVideoDecoderManager::mManagerLooper thread.
void
GonkVideoDecoderManager::onMessageReceived(const sp<AMessage> &aMessage)
{
@ -418,6 +458,12 @@ GonkVideoDecoderManager::onMessageReceived(const sp<AMessage> &aMessage)
break;
}
case kNotifyPostReleaseBuffer:
{
ReleaseAllPendingVideoBuffersLocked();
break;
}
default:
TRESPASS();
break;
@ -484,4 +530,51 @@ GonkVideoDecoderManager::GetColorConverterBuffer(int32_t aWidth, int32_t aHeight
return mColorConverterBuffer.get();
}
/* static */
// TextureClient recycle callback, registered in CreateVideoData via
// SetRecycleCallback(). Invoked when the compositor side is done with the
// gralloc TextureClient, so the wrapped MediaBuffer can be handed back to
// the decoder. aClosure is the GonkVideoDecoderManager that registered it.
void
GonkVideoDecoderManager::RecycleCallback(TextureClient* aClient, void* aClosure)
{
GonkVideoDecoderManager* videoManager = static_cast<GonkVideoDecoderManager*>(aClosure);
GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(aClient);
// Clear the recycle callback before queueing the buffer for release.
aClient->ClearRecycleCallback();
// Defer the actual release to the manager's looper thread.
videoManager->PostReleaseVideoBuffer(client->GetMediaBuffer());
}
// Queues aBuffer onto mPendingVideoBuffers and posts a
// kNotifyPostReleaseBuffer message to the handler; the actual release
// happens on the looper thread in ReleaseAllPendingVideoBuffersLocked().
// May be called from any thread (e.g. the compositor's recycle callback).
void GonkVideoDecoderManager::PostReleaseVideoBuffer(
android::MediaBuffer *aBuffer)
{
{
// Scope the lock to the queue mutation only.
MutexAutoLock autoLock(mPendingVideoBuffersLock);
if (aBuffer) {
mPendingVideoBuffers.append(aBuffer);
}
}
// NOTE(review): the notify is posted even when aBuffer is null —
// presumably to drain previously queued buffers; confirm this is intended.
sp<AMessage> notify =
new AMessage(kNotifyPostReleaseBuffer, mHandler->id());
notify->post();
}
// Drains mPendingVideoBuffers and returns each MediaBuffer to the decoder
// via MediaCodecProxy::ReleaseMediaBuffer(). Runs on the looper thread in
// response to kNotifyPostReleaseBuffer.
// NOTE(review): despite the "Locked" suffix, this function acquires
// mPendingVideoBuffersLock itself — callers must NOT already hold the lock.
void GonkVideoDecoderManager::ReleaseAllPendingVideoBuffersLocked()
{
Vector<android::MediaBuffer*> releasingVideoBuffers;
{
// Snapshot the pending list under the lock; the actual release happens
// outside it so we never hold the mutex across decoder calls.
MutexAutoLock autoLock(mPendingVideoBuffersLock);
int size = mPendingVideoBuffers.length();
for (int i = 0; i < size; i++) {
releasingVideoBuffers.append(mPendingVideoBuffers[i]);
}
mPendingVideoBuffers.clear();
}
// Free all pending video buffers without holding mPendingVideoBuffersLock.
int size = releasingVideoBuffers.length();
for (int i = 0; i < size; i++) {
android::MediaBuffer *buffer;
buffer = releasingVideoBuffers[i];
mDecoder->ReleaseMediaBuffer(buffer);
buffer = nullptr; // NOTE(review): clears only the local copy; no effect on the vector
}
releasingVideoBuffers.clear();
}
} // namespace mozilla

View File

@ -7,6 +7,7 @@
#if !defined(GonkVideoDecoderManager_h_)
#define GonkVideoDecoderManager_h_
#include <set>
#include "MP4Reader.h"
#include "nsRect.h"
#include "GonkMediaDataDecoder.h"
@ -14,6 +15,8 @@
#include "I420ColorConverterHelper.h"
#include "MediaCodecProxy.h"
#include <stagefright/foundation/AHandler.h>
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
using namespace android;
@ -21,12 +24,18 @@ namespace android {
struct MOZ_EXPORT ALooper;
class MOZ_EXPORT MediaBuffer;
struct MOZ_EXPORT AString;
class GonkNativeWindow;
} // namespace android
namespace mozilla {
namespace layers {
class TextureClient;
} // namespace mozilla::layers
class GonkVideoDecoderManager : public GonkDecoderManager {
typedef android::MediaCodecProxy MediaCodecProxy;
typedef mozilla::layers::TextureClient TextureClient;
public:
GonkVideoDecoderManager(mozilla::layers::ImageContainer* aImageContainer,
@ -41,6 +50,7 @@ public:
virtual nsresult Output(int64_t aStreamOffset,
nsAutoPtr<MediaData>& aOutput) MOZ_OVERRIDE;
static void RecycleCallback(TextureClient* aClient, void* aClosure);
private:
struct FrameInfo
{
@ -102,6 +112,9 @@ private:
void codecCanceled();
void onMessageReceived(const sp<AMessage> &aMessage);
void ReleaseAllPendingVideoBuffersLocked();
void PostReleaseVideoBuffer(android::MediaBuffer *aBuffer);
const mp4_demuxer::VideoDecoderConfig& mConfig;
uint32_t mVideoWidth;
uint32_t mVideoHeight;
@ -121,12 +134,25 @@ private:
android::sp<VideoResourceListener> mVideoListener;
android::sp<MessageHandler> mHandler;
android::sp<ALooper> mLooper;
android::sp<ALooper> mManagerLooper;
FrameInfo mFrameInfo;
// color converter
android::I420ColorConverterHelper mColorConverter;
nsAutoArrayPtr<uint8_t> mColorConverterBuffer;
size_t mColorConverterBufferSize;
android::sp<android::GonkNativeWindow> mNativeWindow;
enum {
kNotifyPostReleaseBuffer = 'nprb',
};
// Holds the video MediaBuffers that have been queued for release.
// The held MediaBuffers are released soon after a flush.
Vector<android::MediaBuffer*> mPendingVideoBuffers;
// The lock protects mPendingVideoBuffers.
Mutex mPendingVideoBuffersLock;
};
} // namespace mozilla

View File

@ -16,6 +16,7 @@
#include <android/log.h>
#define ALOG(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define TIMEOUT_DEQUEUE_INPUTBUFFER_MS 1000000ll
namespace android {
// General Template: MediaCodec::getOutputGraphicBufferFromIndex(...)
@ -224,6 +225,7 @@ MediaCodecProxy::start()
if (mCodec == nullptr) {
return NO_INIT;
}
return mCodec->start();
}
@ -560,8 +562,14 @@ status_t MediaCodecProxy::Output(MediaBuffer** aBuffer, int64_t aTimeoutUs)
}
MediaBuffer *buffer;
sp<GraphicBuffer> graphicBuffer;
buffer = new MediaBuffer(mOutputBuffers.itemAt(index));
if (getOutputGraphicBufferFromIndex(index, &graphicBuffer) == OK &&
graphicBuffer != nullptr) {
buffer = new MediaBuffer(graphicBuffer);
} else {
buffer = new MediaBuffer(mOutputBuffers.itemAt(index));
}
sp<MetaData> metaData = buffer->meta_data();
metaData->setInt32(kKeyBufferIndex, index);
metaData->setInt64(kKeyTime, timeUs);
@ -592,4 +600,14 @@ void MediaCodecProxy::ReleaseMediaResources()
}
}
// Releases a MediaBuffer previously handed out by Output() and returns the
// corresponding codec output buffer to MediaCodec. Output() stashes the
// output-buffer index in the buffer's metadata under kKeyBufferIndex
// (see metaData->setInt32(kKeyBufferIndex, index) in the Output() hunk),
// which is recovered here to call releaseOutputBuffer(). Null aBuffer is a
// no-op.
void MediaCodecProxy::ReleaseMediaBuffer(MediaBuffer* aBuffer) {
if (aBuffer) {
sp<MetaData> metaData = aBuffer->meta_data();
int32_t index;
metaData->findInt32(kKeyBufferIndex, &index);
// Drop our reference first, then give the slot back to the codec.
aBuffer->release();
releaseOutputBuffer(index);
}
}
} // namespace android

View File

@ -8,11 +8,9 @@
#define MEDIA_CODEC_PROXY_H
#include <nsString.h>
#include <stagefright/MediaCodec.h>
#include <stagefright/MediaBuffer.h>
#include <utils/threads.h>
#include "MediaResourceHandler.h"
namespace android {
@ -135,6 +133,8 @@ public:
bool IsDormantNeeded();
void ReleaseMediaResources();
void ReleaseMediaBuffer(MediaBuffer* abuffer);
protected:
virtual ~MediaCodecProxy();