Bug 1224889 - OpenMax IL video decoding for PDM. r=sotaro, r=nical

This commit is contained in:
Alfredo Yang 2016-01-08 06:24:00 -05:00
parent cd1f126100
commit edad6e51e4
11 changed files with 618 additions and 192 deletions

View File

@ -8,6 +8,11 @@
#include "OmxPromiseLayer.h"
#include "GonkOmxPlatformLayer.h"
#include "MediaInfo.h"
#include "ImageContainer.h"
#include "mozilla/Monitor.h"
#include "mozilla/layers/TextureClient.h"
#include "mozilla/layers/GrallocTextureClient.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include <binder/MemoryDealer.h>
#include <media/IOMX.h>
#include <utils/List.h>
@ -21,6 +26,12 @@ extern mozilla::LogModule* GetPDMLog();
#define LOG(arg, ...) MOZ_LOG(GetPDMLog(), mozilla::LogLevel::Debug, ("GonkOmxPlatformLayer:: " arg, ##__VA_ARGS__))
#define CHECK_ERR(err) \
if (err != OK) { \
LOG("error %d at %s", err, __func__); \
return NS_ERROR_FAILURE; \
} \
using namespace android;
namespace mozilla {
@ -114,29 +125,211 @@ public:
protected:
RefPtr<TaskQueue> mTaskQueue;
// TODO:
// we should combination both event handlers into one. And we should provide
// we should combine both event handlers into one. And we should provide
// a unified way of event handling in the OmxPlatformLayer class.
RefPtr<OmxPromiseLayer> mPromiseLayer;
RefPtr<OmxDataDecoder> mClient;
};
GonkBufferData::GonkBufferData(android::IOMX::buffer_id aId, bool aLiveInLocal, android::IMemory* aMemory)
: BufferData((OMX_BUFFERHEADERTYPE*)aId)
, mId(aId)
// This class allocates a Gralloc buffer and manages the TextureClient's
// recycling: the TextureClient is handed out to the layers system and a
// promise is resolved once layers returns (recycles) it.
class GonkTextureClientRecycleHandler : public layers::ITextureClientRecycleAllocator
{
  // Resolved with the (non-owning) TextureClient pointer once the client
  // has been recycled; rejected on Shutdown().
  typedef MozPromise<layers::TextureClient*, nsresult, /* IsExclusive = */ true> TextureClientRecyclePromise;

public:
  // aDef supplies the frame geometry and color format used to allocate the
  // underlying Gralloc buffer.
  GonkTextureClientRecycleHandler(OMX_VIDEO_PORTDEFINITIONTYPE& aDef)
    : ITextureClientRecycleAllocator()
    , mMonitor("GonkTextureClientRecycleHandler")
  {
    // Allocate Gralloc texture memory.
    layers::GrallocTextureData* textureData =
      layers::GrallocTextureData::Create(gfx::IntSize(aDef.nFrameWidth, aDef.nFrameHeight),
                                         aDef.eColorFormat,
                                         gfx::BackendType::NONE,
                                         GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_READ_OFTEN,
                                         layers::ImageBridgeChild::GetSingleton());
    mGraphBuffer = textureData->GetGraphicBuffer();
    MOZ_ASSERT(mGraphBuffer.get());

    mTextureClient =
      layers::TextureClient::CreateWithData(textureData,
                                            layers::TextureFlags::DEALLOCATE_CLIENT | layers::TextureFlags::RECYCLE,
                                            layers::ImageBridgeChild::GetSingleton());
    MOZ_ASSERT(mTextureClient);

    mPromise.SetMonitor(&mMonitor);
  }

  // Arms the recycle callback and returns a promise that is resolved when
  // layers hands the TextureClient back (see RecycleTextureClient()).
  RefPtr<TextureClientRecyclePromise> WaitforRecycle()
  {
    MonitorAutoLock lock(mMonitor);
    MOZ_ASSERT(!!mGraphBuffer.get());

    mTextureClient->SetRecycleAllocator(this);
    return mPromise.Ensure(__func__);
  }

  // DO NOT use smart pointer to receive TextureClient; otherwise it will
  // disrupt the reference count.
  layers::TextureClient* GetTextureClient()
  {
    return mTextureClient;
  }

  // Returns the Gralloc buffer, or null after Shutdown().
  GraphicBuffer* GetGraphicBuffer()
  {
    MonitorAutoLock lock(mMonitor);
    return mGraphBuffer.get();
  }

  // This function is called from layers thread.
  void RecycleTextureClient(layers::TextureClient* aClient) override
  {
    MOZ_ASSERT(mTextureClient == aClient);

    // Clearing the recycle allocator drops a reference, so make sure we stay alive
    // for the duration of this function.
    RefPtr<GonkTextureClientRecycleHandler> kungFuDeathGrip(this);
    aClient->SetRecycleAllocator(nullptr);

    {
      MonitorAutoLock lock(mMonitor);
      mPromise.ResolveIfExists(mTextureClient, __func__);
    }
  }

  // Rejects any outstanding recycle promise and drops the GraphicBuffer
  // reference. Called when the owning buffer is being released.
  void Shutdown()
  {
    MonitorAutoLock lock(mMonitor);

    mPromise.RejectIfExists(NS_ERROR_FAILURE, __func__);

    // DO NOT clear TextureClient here.
    // The ref count could be 1 and RecycleCallback will be called if we clear
    // the ref count here. That breaks the whole mechanism. (RecycleCallback
    // should be called from layers)
    mGraphBuffer = nullptr;
  }

private:
  // Because TextureClient calls RecycleCallback when ref count is 1, so we
  // should hold only one reference here and use raw pointer when out of this
  // class.
  RefPtr<layers::TextureClient> mTextureClient;

  // It is protected by mMonitor.
  sp<android::GraphicBuffer> mGraphBuffer;

  // It is protected by mMonitor.
  MozPromiseHolder<TextureClientRecyclePromise> mPromise;

  Monitor mMonitor;
};
GonkBufferData::GonkBufferData(bool aLiveInLocal,
GonkOmxPlatformLayer* aGonkPlatformLayer)
: BufferData(nullptr)
, mId(0)
, mGonkPlatformLayer(aGonkPlatformLayer)
{
if (!aLiveInLocal) {
mLocalBuffer = new OMX_BUFFERHEADERTYPE;
PodZero(mLocalBuffer.get());
// aMemory is a IPC memory, it is safe to use it here.
mLocalBuffer->pBuffer = (OMX_U8*)aMemory->pointer();
mBuffer = mLocalBuffer.get();
mMirrorBuffer = new OMX_BUFFERHEADERTYPE;
PodZero(mMirrorBuffer.get());
mBuffer = mMirrorBuffer.get();
}
}
// Tears down the TextureClient recycle handler (if any): rejects its pending
// recycle promise and drops its GraphicBuffer reference.
void
GonkBufferData::ReleaseBuffer()
{
  if (mTextureClientRecycleHandler) {
    mTextureClientRecycleHandler->Shutdown();
    mTextureClientRecycleHandler = nullptr;
  }
}
// Points the mirror OMX_BUFFERHEADERTYPE at the IPC shared memory that backs
// a remotely-allocated buffer. Requires InitSharedMemory to be used only on
// mirror buffers (the remote case).
nsresult
GonkBufferData::InitSharedMemory(android::IMemory* aMemory)
{
  MOZ_RELEASE_ASSERT(mMirrorBuffer.get());

  // aMemory is IPC-shared memory, so dereferencing its pointer locally is safe.
  mBuffer->nAllocLen = aMemory->size();
  mBuffer->pBuffer = (OMX_U8*)aMemory->pointer();

  return NS_OK;
}
// In the local (in-process) case the buffer id *is* the OMX_BUFFERHEADERTYPE
// pointer itself, so no mirror buffer is needed.
nsresult
GonkBufferData::InitLocalBuffer(IOMX::buffer_id aId)
{
  MOZ_RELEASE_ASSERT(!mMirrorBuffer.get());

  mBuffer = (OMX_BUFFERHEADERTYPE*)aId;

  return NS_OK;
}
// Creates the recycle handler, which allocates the Gralloc GraphicBuffer used
// for hardware-accelerated video output on this port.
nsresult
GonkBufferData::InitGraphicBuffer(OMX_VIDEO_PORTDEFINITIONTYPE& aDef)
{
  mTextureClientRecycleHandler = new GonkTextureClientRecycleHandler(aDef);

  // Gralloc allocation can fail; report failure rather than keeping a
  // buffer-less handler around.
  return mTextureClientRecycleHandler->GetGraphicBuffer() ? NS_OK
                                                          : NS_ERROR_FAILURE;
}
// Wraps this buffer's decoded frame (its GraphicBuffer-backed TextureClient)
// into a VideoData for the media pipeline. Returns nullptr for audio tracks.
// Also arms the recycle promise so this BufferData is only marked reusable
// once layers returns the TextureClient.
already_AddRefed<MediaData>
GonkBufferData::GetPlatformMediaData()
{
  if (mGonkPlatformLayer->GetTrackInfo()->GetAsAudioInfo()) {
    // This is audio decoding.
    return nullptr;
  }

  // Video buffers are always allocated via InitGraphicBuffer() on this path.
  MOZ_RELEASE_ASSERT(mTextureClientRecycleHandler);

  VideoInfo info;
  info.mDisplay = mGonkPlatformLayer->GetTrackInfo()->GetAsVideoInfo()->mDisplay;
  info.mImage = mGonkPlatformLayer->GetTrackInfo()->GetAsVideoInfo()->mImage;
  RefPtr<VideoData> data = VideoData::Create(info,
                                             mGonkPlatformLayer->GetImageContainer(),
                                             0,
                                             mBuffer->nTimeStamp,
                                             1,
                                             mTextureClientRecycleHandler->GetTextureClient(),
                                             false,
                                             0,
                                             info.mImage);
  LOG("GetMediaData: %p, disp width %d, height %d, pic width %d, height %d, time %ld",
      this, info.mDisplay.width, info.mDisplay.height,
      info.mImage.width, info.mImage.height, mBuffer->nTimeStamp);

  // Get TextureClient Promise here to wait for resolved.
  // Resolution/rejection is forwarded to mPromise (this buffer's own promise)
  // on the platform layer's task queue.
  RefPtr<GonkBufferData> self(this);
  mTextureClientRecycleHandler->WaitforRecycle()
    ->Then(mGonkPlatformLayer->GetTaskQueue(), __func__,
           [self] () {
             // Waiting for texture to be freed.
             self->mTextureClientRecycleHandler->GetTextureClient()->WaitForBufferOwnership();
             self->mPromise.ResolveIfExists(self, __func__);
           },
           [self] () {
             OmxBufferFailureHolder failure(OMX_ErrorUndefined, self);
             self->mPromise.RejectIfExists(failure, __func__);
           });

  return data.forget();
}
GonkOmxPlatformLayer::GonkOmxPlatformLayer(OmxDataDecoder* aDataDecoder,
OmxPromiseLayer* aPromiseLayer,
TaskQueue* aTaskQueue)
TaskQueue* aTaskQueue,
layers::ImageContainer* aImageContainer)
: mTaskQueue(aTaskQueue)
, mImageContainer(aImageContainer)
, mNode(0)
, mQuirks(0)
, mUsingHardwareCodec(false)
@ -169,8 +362,27 @@ GonkOmxPlatformLayer::AllocateOmxBuffer(OMX_DIRTYPE aType,
}
}
size_t t = def.nBufferCountActual * def.nBufferSize;
LOG("Buffer count %d, buffer size %d", def.nBufferCountActual, def.nBufferSize);
size_t t = 0;
// Configure video output GraphicBuffer for video decoding acceleration.
bool useGralloc = false;
if ((aType == OMX_DirOutput) &&
(mQuirks & OMXCodec::kRequiresAllocateBufferOnOutputPorts) &&
(def.eDomain == OMX_PortDomainVideo)) {
if (NS_FAILED(EnableOmxGraphicBufferPort(def))) {
return NS_ERROR_FAILURE;
}
LOG("Enable OMX GraphicBuffer port, number %d, width %d, height %d", def.nBufferCountActual,
def.format.video.nFrameWidth, def.format.video.nFrameHeight);
useGralloc = true;
t = 1024; // MemoryDealer doesn't like 0, it's just for MemoryDealer happy.
} else {
t = def.nBufferCountActual * def.nBufferSize;
LOG("Buffer count %d, buffer size %d", def.nBufferCountActual, def.nBufferSize);
}
bool liveinlocal = mOmx->livesLocally(mNode, getpid());
@ -178,24 +390,47 @@ GonkOmxPlatformLayer::AllocateOmxBuffer(OMX_DIRTYPE aType,
// lives in mediaserver.
mMemoryDealer[aType] = new MemoryDealer(t, "Gecko-OMX");
for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
sp<IMemory> mem = mMemoryDealer[aType]->allocate(def.nBufferSize);
MOZ_ASSERT(mem.get());
RefPtr<GonkBufferData> buffer;
IOMX::buffer_id bufferID;
status_t st;
nsresult rv;
if ((mQuirks & OMXCodec::kRequiresAllocateBufferOnInputPorts && aType == OMX_DirInput) ||
(mQuirks & OMXCodec::kRequiresAllocateBufferOnOutputPorts && aType == OMX_DirOutput)) {
st = mOmx->allocateBufferWithBackup(mNode, aType, mem, &bufferID);
buffer = new GonkBufferData(liveinlocal, this);
if (useGralloc) {
// Buffer is lived remotely. Use GraphicBuffer for decoded video frame display.
rv = buffer->InitGraphicBuffer(def.format.video);
NS_ENSURE_SUCCESS(rv, rv);
st = mOmx->useGraphicBuffer(mNode,
def.nPortIndex,
buffer->mTextureClientRecycleHandler->GetGraphicBuffer(),
&bufferID);
CHECK_ERR(st);
} else {
st = mOmx->useBuffer(mNode, aType, mem, &bufferID);
sp<IMemory> mem = mMemoryDealer[aType]->allocate(def.nBufferSize);
MOZ_ASSERT(mem.get());
if ((mQuirks & OMXCodec::kRequiresAllocateBufferOnInputPorts && aType == OMX_DirInput) ||
(mQuirks & OMXCodec::kRequiresAllocateBufferOnOutputPorts && aType == OMX_DirOutput)) {
// Buffer is lived remotely. We allocate a local OMX_BUFFERHEADERTYPE
// as the mirror of the remote OMX_BUFFERHEADERTYPE.
st = mOmx->allocateBufferWithBackup(mNode, aType, mem, &bufferID);
CHECK_ERR(st);
rv = buffer->InitSharedMemory(mem.get());
NS_ENSURE_SUCCESS(rv, rv);
} else {
// Buffer is lived locally, bufferID is the actually OMX_BUFFERHEADERTYPE
// pointer.
st = mOmx->useBuffer(mNode, aType, mem, &bufferID);
CHECK_ERR(st);
rv = buffer->InitLocalBuffer(bufferID);
NS_ENSURE_SUCCESS(rv, rv);
}
}
if (st != OK) {
return NS_ERROR_FAILURE;
}
rv = buffer->SetBufferId(bufferID);
NS_ENSURE_SUCCESS(rv, rv);
aBufferList->AppendElement(new GonkBufferData(bufferID, liveinlocal, mem.get()));
aBufferList->AppendElement(buffer);
}
return NS_OK;
@ -206,12 +441,15 @@ GonkOmxPlatformLayer::ReleaseOmxBuffer(OMX_DIRTYPE aType,
BUFFERLIST* aBufferList)
{
status_t st;
for (uint32_t i = 0; i < aBufferList->Length(); i++) {
IOMX::buffer_id id = (OMX_BUFFERHEADERTYPE*) aBufferList->ElementAt(i)->ID();
uint32_t len = aBufferList->Length();
for (uint32_t i = 0; i < len; i++) {
GonkBufferData* buffer = static_cast<GonkBufferData*>(aBufferList->ElementAt(i).get());
IOMX::buffer_id id = (OMX_BUFFERHEADERTYPE*) buffer->ID();
st = mOmx->freeBuffer(mNode, aType, id);
if (st != OK) {
return NS_ERROR_FAILURE;
}
buffer->ReleaseBuffer();
}
aBufferList->Clear();
mMemoryDealer[aType].clear();
@ -219,6 +457,17 @@ GonkOmxPlatformLayer::ReleaseOmxBuffer(OMX_DIRTYPE aType,
return NS_OK;
}
// Asks the OMX component to accept GraphicBuffers on the given port, enabling
// zero-copy display of decoded frames.
nsresult
GonkOmxPlatformLayer::EnableOmxGraphicBufferPort(OMX_PARAM_PORTDEFINITIONTYPE& aDef)
{
  // CHECK_ERR logs and returns NS_ERROR_FAILURE on any non-OK status.
  status_t st = mOmx->enableGraphicBuffers(mNode, aDef.nPortIndex, OMX_TRUE);
  CHECK_ERR(st);

  return NS_OK;
}
OMX_ERRORTYPE
GonkOmxPlatformLayer::GetState(OMX_STATETYPE* aType)
{
@ -261,6 +510,7 @@ GonkOmxPlatformLayer::Shutdown()
OMX_ERRORTYPE
GonkOmxPlatformLayer::InitOmxToStateLoaded(const TrackInfo* aInfo)
{
mInfo = aInfo;
status_t err = mOmxClient.connect();
if (err != OK) {
return OMX_ErrorUndefined;
@ -270,11 +520,11 @@ GonkOmxPlatformLayer::InitOmxToStateLoaded(const TrackInfo* aInfo)
return OMX_ErrorUndefined;
}
// In Gonk, the software compoment name has prefix "OMX.google". It needs to
// In Gonk, the software component name has prefix "OMX.google". It needs to
// have a way to use hardware codec first.
android::Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;
const char* swcomponent = nullptr;
OMXCodec::findMatchingCodecs(aInfo->mMimeType.Data(),
OMXCodec::findMatchingCodecs(mInfo->mMimeType.Data(),
0,
nullptr,
0,
@ -289,14 +539,15 @@ GonkOmxPlatformLayer::InitOmxToStateLoaded(const TrackInfo* aInfo)
mUsingHardwareCodec = true;
return OMX_ErrorNone;
}
LOG("failed to load component %s", componentName);
}
}
// TODO: in android ICS, the software codec is allocated in mediaserver by
// default, it may be necessay to allocate it in local process.
// default, it may be necessary to allocate it in local process.
//
// fallback to sw codec
if (LoadComponent(swcomponent)) {
if (swcomponent && LoadComponent(swcomponent)) {
return OMX_ErrorNone;
}
@ -318,7 +569,7 @@ GonkOmxPlatformLayer::EmptyThisBuffer(BufferData* aData)
OMX_ERRORTYPE
GonkOmxPlatformLayer::FillThisBuffer(BufferData* aData)
{
return (OMX_ERRORTYPE)mOmx->fillBuffer(mNode, (IOMX::buffer_id)aData->mBuffer);
return (OMX_ERRORTYPE)mOmx->fillBuffer(mNode, (IOMX::buffer_id)aData->ID());
}
OMX_ERRORTYPE
@ -329,6 +580,14 @@ GonkOmxPlatformLayer::SendCommand(OMX_COMMANDTYPE aCmd,
return (OMX_ERRORTYPE)mOmx->sendCommand(mNode, aCmd, aParam1);
}
// Zeroes an OMX parameter struct and fills in the nSize / version header
// fields that every OMX Get/SetParameter call requires.
template<class T> void
GonkOmxPlatformLayer::InitOmxParameter(T* aParam)
{
  PodZero(aParam);
  aParam->nSize = sizeof(T);
  aParam->nVersion.s.nVersionMajor = 1;
}
bool
GonkOmxPlatformLayer::LoadComponent(const char* aName)
{
@ -342,12 +601,16 @@ GonkOmxPlatformLayer::LoadComponent(const char* aName)
return false;
}
template<class T> void
GonkOmxPlatformLayer::InitOmxParameter(T* aParam)
layers::ImageContainer*
GonkOmxPlatformLayer::GetImageContainer()
{
PodZero(aParam);
aParam->nSize = sizeof(T);
aParam->nVersion.s.nVersionMajor = 1;
return mImageContainer;
}
// Returns the track info captured in InitOmxToStateLoaded(); may be null
// before initialization.
const TrackInfo*
GonkOmxPlatformLayer::GetTrackInfo()
{
  return mInfo;
}
} // mozilla

View File

@ -13,62 +13,102 @@
#include "OMX_Component.h"
#include <utils/RefBase.h>
#include <media/stagefright/OMXClient.h>
#include "mozilla/layers/TextureClientRecycleAllocator.h"
namespace android {
class MemoryDealer;
class IMemory;
class MemoryDealer;
}
namespace mozilla {
class GonkOmxObserver;
class GonkOmxPlatformLayer;
class GonkTextureClientRecycleHandler;
/*
* Due to Android's omx node could live in local process (client) or remote
* process (mediaserver).
* process (mediaserver). And there are 3 kinds of buffer in Android OMX.
*
* When it is in local process, the IOMX::buffer_id is OMX_BUFFERHEADERTYPE
* 1.
* When buffer is in local process, the IOMX::buffer_id is OMX_BUFFERHEADERTYPE
* pointer actually, it is safe to use it directly.
*
* When it is in remote process, the OMX_BUFFERHEADERTYPE pointer is 'IN' the
* 2.
* When buffer is in remote process, the OMX_BUFFERHEADERTYPE pointer is 'IN' the
* remote process. It can't be used in local process, so here it allocates a
* local OMX_BUFFERHEADERTYPE.
* local OMX_BUFFERHEADERTYPE. The raw/decoded data is in the android shared
* memory, IMemory.
*
* 3.
* When buffer is in remote process for the display output port. It uses
* GraphicBuffer to accelerate the decoding and display.
*
*/
class GonkBufferData : public OmxPromiseLayer::BufferData {
protected:
virtual ~GonkBufferData() {}
public:
// aMemory is an IPC based memory which will be used as the pBuffer in
// mLocalBuffer.
GonkBufferData(android::IOMX::buffer_id aId, bool aLiveInLocal, android::IMemory* aMemory);
GonkBufferData(bool aLiveInLocal,
GonkOmxPlatformLayer* aLayer);
BufferID ID() override
{
return mId;
}
already_AddRefed<MediaData> GetPlatformMediaData() override;
bool IsLocalBuffer()
{
return !!mLocalBuffer.get();
return !!mMirrorBuffer.get();
}
void ReleaseBuffer();
nsresult SetBufferId(android::IOMX::buffer_id aId)
{
mId = aId;
return NS_OK;
}
// The mBuffer is in local process. And aId is actually the OMX_BUFFERHEADERTYPE
// pointer. It doesn't need a mirror buffer.
nsresult InitLocalBuffer(android::IOMX::buffer_id aId);
// aMemory is an IPC based memory which will be used as the pBuffer in
// mBuffer. And the mBuffer will be the mirror OMX_BUFFERHEADERTYPE
// of the one in the remote process.
nsresult InitSharedMemory(android::IMemory* aMemory);
// GraphicBuffer is for video decoding acceleration on output port.
// Then mBuffer is the mirror OMX_BUFFERHEADERTYPE of the one in the remote
// process.
nsresult InitGraphicBuffer(OMX_VIDEO_PORTDEFINITIONTYPE& aDef);
// Android OMX uses this id to pass the buffer between OMX component and
// client.
android::IOMX::buffer_id mId;
// mLocalBuffer are used only when the omx node is in mediaserver.
// mMirrorBuffer is used only when the omx node lives in mediaserver.
// Due to IPC problem, the mId is the OMX_BUFFERHEADERTYPE address in mediaserver.
// It can't be mapped into the client process, so we need a local OMX_BUFFERHEADERTYPE
// here.
nsAutoPtr<OMX_BUFFERHEADERTYPE> mLocalBuffer;
// here to mirror the remote OMX_BUFFERHEADERTYPE in mediaserver.
nsAutoPtr<OMX_BUFFERHEADERTYPE> mMirrorBuffer;
// It creates GraphicBuffer and manages TextureClient.
RefPtr<GonkTextureClientRecycleHandler> mTextureClientRecycleHandler;
GonkOmxPlatformLayer* mGonkPlatformLayer;
};
class GonkOmxPlatformLayer : public OmxPlatformLayer {
public:
GonkOmxPlatformLayer(OmxDataDecoder* aDataDecoder,
OmxPromiseLayer* aPromiseLayer,
TaskQueue* aTaskQueue);
TaskQueue* aTaskQueue,
layers::ImageContainer* aImageContainer);
nsresult AllocateOmxBuffer(OMX_DIRTYPE aType, BUFFERLIST* aBufferList) override;
@ -97,17 +137,32 @@ public:
nsresult Shutdown() override;
// TODO:
// There is another InitOmxParameter in OmxDataDecoder. They need to combinate
// There is another InitOmxParameter in OmxDataDecoder. They need to combine
// to one function.
template<class T> void InitOmxParameter(T* aParam);
protected:
friend GonkBufferData;
layers::ImageContainer* GetImageContainer();
const TrackInfo* GetTrackInfo();
TaskQueue* GetTaskQueue()
{
return mTaskQueue;
}
nsresult EnableOmxGraphicBufferPort(OMX_PARAM_PORTDEFINITIONTYPE& aDef);
bool LoadComponent(const char* aName);
friend class GonkOmxObserver;
RefPtr<TaskQueue> mTaskQueue;
RefPtr<layers::ImageContainer> mImageContainer;
// OMX_DirInput is 0, OMX_DirOutput is 1.
android::sp<android::MemoryDealer> mMemoryDealer[2];
@ -122,6 +177,8 @@ protected:
uint32_t mQuirks;
bool mUsingHardwareCodec;
const TrackInfo* mInfo;
};
}

View File

@ -61,7 +61,8 @@ void GetPortIndex(nsTArray<uint32_t>& aPortIndex) {
}
OmxDataDecoder::OmxDataDecoder(const TrackInfo& aTrackInfo,
MediaDataDecoderCallback* aCallback)
MediaDataDecoderCallback* aCallback,
layers::ImageContainer* aImageContainer)
: mMonitor("OmxDataDecoder")
, mOmxTaskQueue(CreateMediaDecodeTaskQueue())
, mWatchManager(this, mOmxTaskQueue)
@ -75,7 +76,7 @@ OmxDataDecoder::OmxDataDecoder(const TrackInfo& aTrackInfo,
, mCallback(aCallback)
{
LOG("(%p)", this);
mOmxLayer = new OmxPromiseLayer(mOmxTaskQueue, this);
mOmxLayer = new OmxPromiseLayer(mOmxTaskQueue, this, aImageContainer);
nsCOMPtr<nsIRunnable> r =
NS_NewRunnableMethod(this, &OmxDataDecoder::InitializationTask);
@ -85,7 +86,6 @@ OmxDataDecoder::OmxDataDecoder(const TrackInfo& aTrackInfo,
OmxDataDecoder::~OmxDataDecoder()
{
LOG("(%p)", this);
mWatchManager.Shutdown();
}
void
@ -123,15 +123,11 @@ OmxDataDecoder::Init()
// Omx component.
InvokeAsync(mOmxTaskQueue, mOmxLayer.get(), __func__, &OmxPromiseLayer::Init,
mOmxTaskQueue, mTrackInfo.get())
->Then(mReaderTaskQueue, __func__,
->Then(mOmxTaskQueue, __func__,
[self] () {
// Omx state should be OMX_StateIdle.
nsCOMPtr<nsIRunnable> r =
NS_NewRunnableFunction([self] () {
self->mOmxState = self->mOmxLayer->GetState();
MOZ_ASSERT(self->mOmxState != OMX_StateIdle);
});
self->mOmxTaskQueue->Dispatch(r.forget());
self->mOmxState = self->mOmxLayer->GetState();
MOZ_ASSERT(self->mOmxState != OMX_StateIdle);
},
[self] () {
self->RejectInitPromise(DecoderFailureReason::INIT_ERROR, __func__);
@ -191,9 +187,6 @@ OmxDataDecoder::Drain()
{
LOG("(%p)", this);
// TODO: For video decoding, it needs to copy the latest video frame to yuv
// and output to layer again, because all video buffers will be released
// later.
nsCOMPtr<nsIRunnable> r =
NS_NewRunnableMethod(this, &OmxDataDecoder::SendEosBuffer);
mOmxTaskQueue->Dispatch(r.forget());
@ -233,7 +226,7 @@ OmxDataDecoder::DoAsyncShutdown()
{
LOG("(%p)", this);
MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());
MOZ_ASSERT(mFlushing);
MOZ_ASSERT(!mFlushing);
mWatchManager.Unwatch(mOmxState, &OmxDataDecoder::OmxStateRunner);
mWatchManager.Unwatch(mPortSettingsChanged, &OmxDataDecoder::PortSettingsChanged);
@ -255,25 +248,17 @@ OmxDataDecoder::DoAsyncShutdown()
RefPtr<OmxCommandPromise> p =
self->mOmxLayer->SendCommand(OMX_CommandStateSet, OMX_StateLoaded, nullptr);
LOG("DoAsyncShutdown: collecting buffers...");
self->CollectBufferPromises(OMX_DirMax)
->Then(self->mOmxTaskQueue, __func__,
[self] () {
// According to spec 3.1.1.2.2.1:
// OMX_StateLoaded needs to be sent before releasing buffers.
// And state transition from OMX_StateIdle to OMX_StateLoaded
// is completed when all of the buffers have been removed
// from the component.
// Here the buffer promises are not resolved due to displaying
// in layer, it needs to wait before the layer returns the
// buffers.
LOG("DoAsyncShutdown: all buffers collected, releasing buffers...");
self->ReleaseBuffers(OMX_DirInput);
self->ReleaseBuffers(OMX_DirOutput);
},
[self] () {
self->mOmxLayer->Shutdown();
});
// According to spec 3.1.1.2.2.1:
// OMX_StateLoaded needs to be sent before releasing buffers.
// And state transition from OMX_StateIdle to OMX_StateLoaded
// is completed when all of the buffers have been removed
// from the component.
// Here the buffer promises are not resolved due to displaying
// in layer, it needs to wait before the layer returns the
// buffers.
LOG("DoAsyncShutdown: releasing buffers...");
self->ReleaseBuffers(OMX_DirInput);
self->ReleaseBuffers(OMX_DirOutput);
return p;
},
@ -285,6 +270,8 @@ OmxDataDecoder::DoAsyncShutdown()
[self] () {
LOG("DoAsyncShutdown: OMX_StateLoaded, it is safe to shutdown omx");
self->mOmxLayer->Shutdown();
self->mWatchManager.Shutdown();
self->mOmxLayer = nullptr;
MonitorAutoLock lock(self->mMonitor);
self->mShuttingDown = false;
@ -292,6 +279,8 @@ OmxDataDecoder::DoAsyncShutdown()
},
[self] () {
self->mOmxLayer->Shutdown();
self->mWatchManager.Shutdown();
self->mOmxLayer = nullptr;
MonitorAutoLock lock(self->mMonitor);
self->mShuttingDown = false;
@ -299,40 +288,11 @@ OmxDataDecoder::DoAsyncShutdown()
});
}
// Notifies the callback with InputExhausted() when:
//   1. no queued raw samples remain, and
//   2. no input buffer is held by the OMX component, and
//   3. every output buffer is held by the component (client is waiting).
// Runs on the OMX task queue, dispatched from FillBufferDone().
void
OmxDataDecoder::CheckIfInputExhausted()
{
  MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());
  // The dispatching code sets mCheckingInputExhausted to true before
  // dispatching this task, so it must still be set here; the original
  // assertion tested the inverse and would fire on every invocation.
  MOZ_ASSERT(mCheckingInputExhausted);
  mCheckingInputExhausted = false;

  if (mMediaRawDatas.Length()) {
    return;
  }

  // When all input buffers are not in omx component, it means all samples have
  // been fed into OMX component.
  for (auto buf : mInPortBuffers) {
    if (buf->mStatus == BufferData::BufferStatus::OMX_COMPONENT) {
      return;
    }
  }

  // When all output buffers are held by component, it means client is waiting for output.
  for (auto buf : mOutPortBuffers) {
    if (buf->mStatus != BufferData::BufferStatus::OMX_COMPONENT) {
      return;
    }
  }

  LOG("Call InputExhausted()");
  mCallback->InputExhausted();
}
void
OmxDataDecoder::OutputAudio(BufferData* aBufferData)
{
// TODO: it'd be better to move these code to BufferData::GetPlatformMediaData() or
// some kind of abstract layer.
MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());
OMX_BUFFERHEADERTYPE* buf = aBufferData->mBuffer;
AudioInfo* info = mTrackInfo->GetAsAudioInfo();
@ -357,31 +317,80 @@ OmxDataDecoder::OutputAudio(BufferData* aBufferData)
aBufferData->mStatus = BufferData::BufferStatus::FREE;
}
// Converts a filled output buffer into a VideoData, stamps it with the
// matching raw-sample metadata, and sends it to the decoder callback. The
// buffer stays in OMX_CLIENT_OUTPUT state until the compositor recycles the
// underlying TextureClient, at which point the buffer's promise flips it
// back to FREE.
void
OmxDataDecoder::OutputVideo(BufferData* aBufferData)
{
  MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());

  RefPtr<MediaData> data = aBufferData->GetPlatformMediaData();
  MOZ_RELEASE_ASSERT(data);

  VideoData* video(data->As<VideoData>());
  if (aBufferData->mRawData) {
    // Copy timing/position metadata from the raw sample that produced this
    // decoded frame.
    video->mTime = aBufferData->mRawData->mTime;
    video->mTimecode = aBufferData->mRawData->mTimecode;
    video->mOffset = aBufferData->mRawData->mOffset;
    video->mDuration = aBufferData->mRawData->mDuration;
    video->mKeyframe = aBufferData->mRawData->mKeyframe;
  }

  aBufferData->mStatus = BufferData::BufferStatus::OMX_CLIENT_OUTPUT;

  // TextureClient's recycle callback is called when reference count of
  // TextureClient becomes 1. In most cases, the last reference count is held
  // by ITextureClientRecycleAllocator.
  // And then promise will be resolved in the callback.
  // TODO:
  // Because it is gonk specific behaviour, it needs to find a way to
  // proper abstracting it.
  MOZ_RELEASE_ASSERT(aBufferData->mPromise.IsEmpty());
  RefPtr<OmxBufferPromise> p = aBufferData->mPromise.Ensure(__func__);

  RefPtr<OmxDataDecoder> self = this;
  RefPtr<BufferData> buffer = aBufferData;
  p->Then(mOmxTaskQueue, __func__,
      [self, buffer] () {
        // Buffer came back from layers; it can be refilled by the component.
        MOZ_RELEASE_ASSERT(buffer->mStatus == BufferData::BufferStatus::OMX_CLIENT_OUTPUT);
        buffer->mStatus = BufferData::BufferStatus::FREE;
        self->FillAndEmptyBuffers();
      },
      [buffer] () {
        // Promise rejected (e.g. shutdown); still release the buffer.
        MOZ_RELEASE_ASSERT(buffer->mStatus == BufferData::BufferStatus::OMX_CLIENT_OUTPUT);
        buffer->mStatus = BufferData::BufferStatus::FREE;
      });

  mCallback->Output(video);
}
void
OmxDataDecoder::FillBufferDone(BufferData* aData)
{
MOZ_ASSERT(!aData || aData->mStatus == BufferData::BufferStatus::OMX_CLIENT);
if (mTrackInfo->IsAudio()) {
OutputAudio(aData);
} else {
MOZ_ASSERT(0);
// Don't output sample when flush or shutting down, especially for video
// decoded frame. Because video decoded frame has a promise in BufferData
// waiting for layer to resolve it via recycle callback on Gonk, if other
// module doesn't send it to the layer, the promise will never be resolved
// and will be waited on forever.
if (mFlushing || mShuttingDown) {
LOG("mFlush or mShuttingDown, drop data");
aData->mStatus = BufferData::BufferStatus::FREE;
return;
}
if (aData->mBuffer->nFlags & OMX_BUFFERFLAG_EOS) {
// Reach eos, it's an empty data so it doesn't need to output.
EndOfStream();
aData->mStatus = BufferData::BufferStatus::FREE;
} else {
FillAndEmptyBuffers();
// If the latest decoded sample's MediaRawData is also the latest input
// sample, it means there is no input data in queue and component, calling
// CheckIfInputExhausted().
if (aData->mRawData == mLatestInputRawData && !mCheckingInputExhausted) {
mCheckingInputExhausted = true;
nsCOMPtr<nsIRunnable> r =
NS_NewRunnableMethod(this, &OmxDataDecoder::CheckIfInputExhausted);
mOmxTaskQueue->Dispatch(r.forget());
if (mTrackInfo->IsAudio()) {
OutputAudio(aData);
} else if (mTrackInfo->IsVideo()) {
OutputVideo(aData);
} else {
MOZ_ASSERT(0);
}
FillAndEmptyBuffers();
}
}
@ -399,6 +408,30 @@ OmxDataDecoder::EmptyBufferDone(BufferData* aData)
// Nothing to do when status of input buffer is OMX_CLIENT.
aData->mStatus = BufferData::BufferStatus::FREE;
FillAndEmptyBuffers();
// There is no way to know if component gets enough raw samples to generate
// output, especially for video decoding. So here it needs to request raw
// samples aggressively.
if (!mCheckingInputExhausted && !mMediaRawDatas.Length()) {
mCheckingInputExhausted = true;
RefPtr<OmxDataDecoder> self = this;
nsCOMPtr<nsIRunnable> r =
NS_NewRunnableFunction([self] () {
MOZ_ASSERT(self->mOmxTaskQueue->IsCurrentThreadIn());
self->mCheckingInputExhausted = false;
if (self->mMediaRawDatas.Length()) {
return;
}
LOG("Call InputExhausted()");
self->mCallback->InputExhausted();
});
mOmxTaskQueue->Dispatch(r.forget());
}
}
void
@ -427,7 +460,7 @@ OmxDataDecoder::FillAndEmptyBuffers()
// Trigger input port.
while (!!mMediaRawDatas.Length()) {
// input buffer must be usedi by component if there is data available.
// input buffer must be used by component if there is data available.
RefPtr<BufferData> inbuf = FindAvailableBuffer(OMX_DirInput);
if (!inbuf) {
LOG("no input buffer!");
@ -435,10 +468,12 @@ OmxDataDecoder::FillAndEmptyBuffers()
}
RefPtr<MediaRawData> data = mMediaRawDatas[0];
// Buffer size should be large enough for the raw data.
MOZ_RELEASE_ASSERT(inbuf->mBuffer->nAllocLen >= data->Size());
memcpy(inbuf->mBuffer->pBuffer, data->Data(), data->Size());
inbuf->mBuffer->nFilledLen = data->Size();
inbuf->mBuffer->nOffset = 0;
// TODO: the frame size could be larger than the buffer size in the video case.
inbuf->mBuffer->nFlags = inbuf->mBuffer->nAllocLen > data->Size() ?
OMX_BUFFERFLAG_ENDOFFRAME : 0;
inbuf->mBuffer->nTimeStamp = data->mTime;
@ -454,7 +489,6 @@ OmxDataDecoder::FillAndEmptyBuffers()
mOmxLayer->EmptyBuffer(inbuf)->Then(mOmxTaskQueue, __func__, this,
&OmxDataDecoder::EmptyBufferDone,
&OmxDataDecoder::EmptyBufferFailure);
mLatestInputRawData.swap(mMediaRawDatas[0]);
mMediaRawDatas.RemoveElementAt(0);
}
@ -551,9 +585,11 @@ OmxDataDecoder::OmxStateRunner()
// Config codec parameters by minetype.
if (mTrackInfo->IsAudio()) {
ConfigAudioCodec();
} else if (mTrackInfo->IsVideo()) {
ConfigVideoCodec();
}
// Send OpenMax state commane to OMX_StateIdle.
// Send OpenMax state command to OMX_StateIdle.
RefPtr<OmxDataDecoder> self = this;
mOmxLayer->SendCommand(OMX_CommandStateSet, OMX_StateIdle, nullptr)
->Then(mOmxTaskQueue, __func__,
@ -589,7 +625,7 @@ OmxDataDecoder::OmxStateRunner()
self->RejectInitPromise(DecoderFailureReason::INIT_ERROR, __func__);
});
} else if (mOmxState == OMX_StateExecuting) {
// Config codec once it gets OMX_StateExecuting state.
// Configure codec once it gets OMX_StateExecuting state.
FillCodecConfigDataToOmx();
} else {
MOZ_ASSERT(0);
@ -618,32 +654,87 @@ OmxDataDecoder::ConfigAudioCodec()
}
}
// Configures the OMX component's input and output video ports with the frame
// geometry from the container metadata, and sets the input port's compression
// format from the track's mime type. Silently returns on any OMX error or on
// an unsupported codec (matching the audio path's best-effort behaviour).
void
OmxDataDecoder::ConfigVideoCodec()
{
  OMX_ERRORTYPE err;
  const VideoInfo* videoInfo = mTrackInfo->GetAsVideoInfo();

  // Map the container mime type to an OMX compression format first, so we
  // never feed an indeterminate value into SetParameter.
  // TODO: it needs to add other formats like webm, mp4, h263... etc.
  OMX_VIDEO_CODINGTYPE codetype = OMX_VIDEO_CodingUnused;
  if (videoInfo->mMimeType.EqualsLiteral("video/avc")) {
    codetype = OMX_VIDEO_CodingAVC;
  } else {
    // Unsupported codec: the original code left `codetype` uninitialized
    // here and then wrote it to the input port (undefined behaviour).
    return;
  }

  OMX_PARAM_PORTDEFINITIONTYPE def;

  // Set up in/out port definition.
  nsTArray<uint32_t> ports;
  GetPortIndex(ports);
  for (auto idx : ports) {
    InitOmxParameter(&def);
    def.nPortIndex = idx;
    err = mOmxLayer->GetParameter(OMX_IndexParamPortDefinition,
                                  &def,
                                  sizeof(def));
    if (err != OMX_ErrorNone) {
      return;
    }

    def.format.video.nFrameWidth = videoInfo->mDisplay.width;
    def.format.video.nFrameHeight = videoInfo->mDisplay.height;
    def.format.video.nStride = videoInfo->mImage.width;
    def.format.video.nSliceHeight = videoInfo->mImage.height;

    if (def.eDir == OMX_DirInput) {
      // Input port carries the compressed bitstream.
      def.format.video.eCompressionFormat = codetype;
      def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    } else {
      // Output port carries raw decoded frames.
      def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    }

    err = mOmxLayer->SetParameter(OMX_IndexParamPortDefinition,
                                  &def,
                                  sizeof(def));
    if (err != OMX_ErrorNone) {
      return;
    }
  }
}
void
OmxDataDecoder::FillCodecConfigDataToOmx()
{
  // Codec configure data should be the first sample running on Omx TaskQueue.
  MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());
  MOZ_ASSERT(!mMediaRawDatas.Length());
  MOZ_ASSERT(mOmxState == OMX_StateIdle || mOmxState == OMX_StateExecuting);

  RefPtr<BufferData> inbuf = FindAvailableBuffer(OMX_DirInput);

  // Pick the codec specific config from the track info; audio and video
  // carry it on different TrackInfo subclasses.
  RefPtr<MediaByteBuffer> csc;
  if (mTrackInfo->IsAudio()) {
    csc = mTrackInfo->GetAsAudioInfo()->mCodecSpecificConfig;
  } else if (mTrackInfo->IsVideo()) {
    csc = mTrackInfo->GetAsVideoInfo()->mCodecSpecificConfig;
  }

  MOZ_RELEASE_ASSERT(csc);

  // Some codecs like h264, its codec specific data is at the first packet,
  // not in the container, so the buffer may be empty; only feed the
  // component when there is actual config data.
  if (csc->Length()) {
    memcpy(inbuf->mBuffer->pBuffer,
           csc->Elements(),
           csc->Length());
    inbuf->mBuffer->nFilledLen = csc->Length();
    inbuf->mBuffer->nOffset = 0;
    // Mark the buffer as a complete frame of codec configuration data so the
    // component consumes it before any media samples.
    inbuf->mBuffer->nFlags = (OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG);

    LOG("Feed codec configure data to OMX component");
    mOmxLayer->EmptyBuffer(inbuf)->Then(mOmxTaskQueue, __func__, this,
                                        &OmxDataDecoder::EmptyBufferDone,
                                        &OmxDataDecoder::EmptyBufferFailure);
  }
}
bool

View File

@ -27,7 +27,7 @@ typedef OmxPromiseLayer::BUFFERLIST BUFFERLIST;
* 2. Keeping the buffers between client and component.
* 3. Manage the OMX state.
*
* From the definiton in OpenMax spec. "2.2.1", there are 3 major roles in
* From the definition in OpenMax spec. "2.2.1", there are 3 major roles in
* OpenMax IL.
*
* IL client:
@ -54,7 +54,8 @@ protected:
public:
OmxDataDecoder(const TrackInfo& aTrackInfo,
MediaDataDecoderCallback* aCallback);
MediaDataDecoderCallback* aCallback,
layers::ImageContainer* aImageContainer);
RefPtr<InitPromise> Init() override;
@ -90,10 +91,11 @@ protected:
void NotifyError(OMX_ERRORTYPE aError, const char* aLine);
// Config audio codec.
// Configure audio codec.
// Some codec may just ignore this and rely on codec specific data in
// FillCodecConfigDataToOmx().
void ConfigAudioCodec();
void ConfigVideoCodec();
// Sending codec specific data to OMX component. OMX component could send a
// OMX_EventPortSettingsChanged back to client. And then client needs to
@ -110,10 +112,7 @@ protected:
void OutputAudio(BufferData* aBufferData);
// Notify InputExhausted when:
// 1. all input buffers are not held by component.
// 2. all output buffers are waiting for filling complete.
void CheckIfInputExhausted();
void OutputVideo(BufferData* aBufferData);
// Buffer can be released if its status is not OMX_COMPONENT or
// OMX_CLIENT_OUTPUT.
@ -182,9 +181,6 @@ protected:
// It is access in Omx TaskQueue.
nsTArray<RefPtr<MediaRawData>> mMediaRawDatas;
// It is access in Omx TaskQueue. The latest input MediaRawData.
RefPtr<MediaRawData> mLatestInputRawData;
BUFFERLIST mInPortBuffers;
BUFFERLIST mOutPortBuffers;

View File

@ -16,7 +16,8 @@ OmxDecoderModule::CreateVideoDecoder(const VideoInfo& aConfig,
FlushableTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback)
{
return nullptr;
RefPtr<OmxDataDecoder> decoder = new OmxDataDecoder(aConfig, aCallback, aImageContainer);
return decoder.forget();
}
already_AddRefed<MediaDataDecoder>
@ -24,7 +25,7 @@ OmxDecoderModule::CreateAudioDecoder(const AudioInfo& aConfig,
FlushableTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback)
{
RefPtr<OmxDataDecoder> decoder = new OmxDataDecoder(aConfig, aCallback);
RefPtr<OmxDataDecoder> decoder = new OmxDataDecoder(aConfig, aCallback, nullptr);
return decoder.forget();
}
@ -43,7 +44,8 @@ OmxDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
bool
OmxDecoderModule::SupportsMimeType(const nsACString& aMimeType) const
{
return aMimeType.EqualsLiteral("audio/mp4a-latm");
return aMimeType.EqualsLiteral("audio/mp4a-latm") ||
aMimeType.EqualsLiteral("video/avc");
}
}

View File

@ -16,6 +16,7 @@
namespace mozilla {
class TrackInfo;
class VideoData;
/*
 * This class is the abstract layer of the platform OpenMax IL implementation.

View File

@ -8,7 +8,7 @@
#include "OmxPlatformLayer.h"
#include "OmxDataDecoder.h"
#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION < 21
#if defined(MOZ_WIDGET_GONK) && (ANDROID_VERSION == 20 || ANDROID_VERSION == 19)
#include "GonkOmxPlatformLayer.h"
#endif
@ -24,11 +24,13 @@ namespace mozilla {
extern void GetPortIndex(nsTArray<uint32_t>& aPortIndex);
OmxPromiseLayer::OmxPromiseLayer(TaskQueue* aTaskQueue, OmxDataDecoder* aDataDecoder)
OmxPromiseLayer::OmxPromiseLayer(TaskQueue* aTaskQueue,
OmxDataDecoder* aDataDecoder,
layers::ImageContainer* aImageContainer)
: mTaskQueue(aTaskQueue)
{
#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION < 21
mPlatformLayer = new GonkOmxPlatformLayer(aDataDecoder, this, aTaskQueue);
#if defined(MOZ_WIDGET_GONK) && (ANDROID_VERSION == 20 || ANDROID_VERSION == 19)
mPlatformLayer = new GonkOmxPlatformLayer(aDataDecoder, this, aTaskQueue, aImageContainer);
#endif
MOZ_ASSERT(!!mPlatformLayer);
}
@ -182,16 +184,7 @@ void
OmxPromiseLayer::EmptyFillBufferDone(OMX_DIRTYPE aType, BufferData::BufferID aID)
{
RefPtr<BufferData> holder = FindAndRemoveBufferHolder(aType, aID);
MOZ_ASSERT(!!holder);
LOG("EmptyFillBufferDone: type %d, buffer %p", aType, holder->mBuffer);
if (holder) {
if (aType == OMX_DirOutput) {
holder->mRawData = nullptr;
holder->mRawData = FindAndRemoveRawData(holder->mBuffer->nTimeStamp);
}
holder->mStatus = BufferData::BufferStatus::OMX_CLIENT;
holder->mPromise.Resolve(holder, __func__);
}
EmptyFillBufferDone(aType, holder);
}
RefPtr<OmxPromiseLayer::OmxCommandPromise>
@ -203,7 +196,7 @@ OmxPromiseLayer::SendCommand(OMX_COMMANDTYPE aCmd, OMX_U32 aParam1, OMX_PTR aCmd
// Some components don't send an event with OMX_ALL, they send flush complete
// event with input port and another event for output port.
// In prupose of better compatibility, we inteprete the OMX_ALL to OMX_DirInput
// For the purpose of better compatibility, we interpret the OMX_ALL to OMX_DirInput
// and OMX_DirOutput flush separately.
OMX_DIRTYPE types[] = {OMX_DIRTYPE::OMX_DirInput, OMX_DIRTYPE::OMX_DirOutput};
for(const auto type : types) {
@ -217,7 +210,7 @@ OmxPromiseLayer::SendCommand(OMX_COMMANDTYPE aCmd, OMX_U32 aParam1, OMX_PTR aCmd
}
}
// Don't overlay more than one fush command, some components can't overlay flush commands.
// Don't overlay more than one flush command, some components can't overlay flush commands.
// So here we send another flush after receiving the previous flush completed event.
if (mFlushCommands.Length()) {
OMX_ERRORTYPE err =
@ -306,6 +299,8 @@ OmxPromiseLayer::Event(OMX_EVENTTYPE aEvent, OMX_U32 aData1, OMX_U32 aData2)
} else if (cmd == OMX_CommandPortEnable) {
OmxCommandFailureHolder failure(OMX_ErrorUndefined, OMX_CommandPortEnable);
mPortEnablePromise.Reject(failure, __func__);
} else {
return false;
}
break;
}

View File

@ -14,9 +14,10 @@
namespace mozilla {
class TrackInfo;
class OmxPlatformLayer;
class OmxDataDecoder;
class TrackInfo;
class MediaData;
/* This class acts as a middle layer between OmxDataDecoder and the underlying
* OmxPlatformLayer.
@ -38,7 +39,9 @@ protected:
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(OmxPromiseLayer)
OmxPromiseLayer(TaskQueue* aTaskQueue, OmxDataDecoder* aDataDecoder);
OmxPromiseLayer(TaskQueue* aTaskQueue,
OmxDataDecoder* aDataDecoder,
layers::ImageContainer* aImageContainer);
class BufferData;
@ -125,6 +128,15 @@ public:
return mBuffer;
}
// Return the platform dependent MediaData().
// For example, it returns the MediaData with Gralloc texture.
// If it returns nullptr, then caller uses the normal way to
// create MediaData().
virtual already_AddRefed<MediaData> GetPlatformMediaData()
{
return nullptr;
}
// The buffer could be used by several objects. And only one object owns the
// buffer the same time.
// FREE:
@ -162,7 +174,7 @@ public:
// records of the original data from demuxer, like duration, stream offset...etc.
RefPtr<MediaRawData> mRawData;
// Because OMX buffer works acorssing threads, so it uses a promise
// Because OMX buffer works across threads, so it uses a promise
// for each buffer when the buffer is used by Omx component.
MozPromiseHolder<OmxBufferPromise> mPromise;
BufferStatus mStatus;
@ -179,7 +191,7 @@ public:
already_AddRefed<BufferData>
FindAndRemoveBufferHolder(OMX_DIRTYPE aType, BufferData::BufferID aId);
// Return truen if event is handled.
// Return true if event is handled.
bool Event(OMX_EVENTTYPE aEvent, OMX_U32 aData1, OMX_U32 aData2);
protected:
@ -208,9 +220,9 @@ protected:
private:
// Elements are added to holders when FillBuffer() or FillBuffer(). And
// removing elelments when the promise is resolved. Buffers in these lists
// removing element when the promise is resolved. Buffers in these lists
// should NOT be used by other component; for example, output it to audio
// output. These list should be empty when engine is about to shutdown.
// output. These lists should be empty when engine is about to shutdown.
//
// Note:
// These bufferlists should not be used by other classes directly.

View File

@ -660,7 +660,7 @@ TextureClient::TextureClientRecycleCallback(TextureClient* aClient, void* aClosu
}
void
TextureClient::SetRecycleAllocator(TextureClientRecycleAllocator* aAllocator)
TextureClient::SetRecycleAllocator(ITextureClientRecycleAllocator* aAllocator)
{
mRecycleAllocator = aAllocator;
if (aAllocator) {

View File

@ -57,7 +57,7 @@ class TextureData;
struct RawTextureBuffer;
class RawYCbCrTextureBuffer;
class TextureClient;
class TextureClientRecycleAllocator;
class ITextureClientRecycleAllocator;
#ifdef GFX_DEBUG_TRACK_CLIENTS_IN_POOL
class TextureClientPool;
#endif
@ -569,8 +569,8 @@ public:
ISurfaceAllocator* GetAllocator() { return mAllocator; }
TextureClientRecycleAllocator* GetRecycleAllocator() { return mRecycleAllocator; }
void SetRecycleAllocator(TextureClientRecycleAllocator* aAllocator);
ITextureClientRecycleAllocator* GetRecycleAllocator() { return mRecycleAllocator; }
void SetRecycleAllocator(ITextureClientRecycleAllocator* aAllocator);
/// If you add new code that uses this method, you are probably doing something wrong.
TextureData* GetInternalData() { return mData; }
@ -604,7 +604,7 @@ protected:
RefPtr<ISurfaceAllocator> mAllocator;
RefPtr<TextureChild> mActor;
RefPtr<TextureClientRecycleAllocator> mRecycleAllocator;
RefPtr<ITextureClientRecycleAllocator> mRecycleAllocator;
RefPtr<AsyncTransactionWaiter> mRemoveFromCompositableWaiter;
TextureData* mData;

View File

@ -18,6 +18,19 @@ namespace layers {
class TextureClientHolder;
class ITextureClientRecycleAllocator
{
protected:
virtual ~ITextureClientRecycleAllocator() {}
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ITextureClientRecycleAllocator)
protected:
friend class TextureClient;
virtual void RecycleTextureClient(TextureClient* aClient) = 0;
};
class ITextureClientAllocationHelper
{
public:
@ -52,14 +65,12 @@ public:
* By default this uses TextureClient::CreateForDrawing to allocate new texture
* clients.
*/
class TextureClientRecycleAllocator
class TextureClientRecycleAllocator : public ITextureClientRecycleAllocator
{
protected:
virtual ~TextureClientRecycleAllocator();
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TextureClientRecycleAllocator)
explicit TextureClientRecycleAllocator(CompositableForwarder* aAllocator);
void SetMaxPoolSize(uint32_t aMax);
@ -85,10 +96,8 @@ protected:
RefPtr<CompositableForwarder> mSurfaceAllocator;
private:
friend class TextureClient;
friend class DefaultTextureClientAllocationHelper;
void RecycleTextureClient(TextureClient* aClient);
void RecycleTextureClient(TextureClient* aClient) override;
static const uint32_t kMaxPooledSized = 2;
uint32_t mMaxPooledSize;