Bug 1146086: use promise to Init() in PlatformDecoderModule. r=jya,r=cpearce

Alfredo Yang 2015-08-11 13:50:07 +10:00 committed by Jean-Yves Avenard
parent 47b63e9cc6
commit fa4f511520
54 changed files with 490 additions and 224 deletions
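
The core API change in this patch: MediaDataDecoder::Init() now returns a MozPromise (InitPromise, resolving with the decoder's TrackType or rejecting with a DecoderFailureReason) instead of a synchronous nsresult. The following is a minimal caller-side sketch of the new convention, assuming a Gecko build; DemoCaller and its members are hypothetical, mirroring the Then()/MozPromiseRequestHolder pattern MediaFormatReader adopts below.

// Hedged sketch only (hypothetical DemoCaller, assumes a Gecko build).
#include "PlatformDecoderModule.h"
#include "mozilla/MozPromise.h"

namespace mozilla {

class DemoCaller {
public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DemoCaller)

  void StartDecoder(MediaDataDecoder* aDecoder, AbstractThread* aThread)
  {
    // Before this patch: nsresult rv = aDecoder->Init(); (synchronous).
    // After: chain onto the InitPromise and track the request so it can be
    // disconnected on shutdown.
    mInitRequest.Begin(aDecoder->Init()
      ->Then(aThread, __func__, this,
             &DemoCaller::OnInitDone,
             &DemoCaller::OnInitFailed));
  }

  void Shutdown() { mInitRequest.DisconnectIfExists(); }

private:
  ~DemoCaller() {}

  void OnInitDone(TrackInfo::TrackType aType)
  {
    mInitRequest.Complete();
    // The decoder is now ready to accept Input() calls.
  }

  void OnInitFailed(MediaDataDecoder::DecoderFailureReason aReason)
  {
    mInitRequest.Complete();
    // aReason is INIT_ERROR or CANCELED.
  }

  MozPromiseRequestHolder<MediaDataDecoder::InitPromise> mInitRequest;
};

} // namespace mozilla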

View File

@ -89,6 +89,7 @@ MediaFormatReader::Shutdown()
MOZ_ASSERT(OnTaskQueue());
mDemuxerInitRequest.DisconnectIfExists();
mDecodersInitRequest.DisconnectIfExists();
mMetadataPromise.RejectIfExists(ReadMetadataFailureReason::METADATA_ERROR, __func__);
mSeekPromise.RejectIfExists(NS_ERROR_FAILURE, __func__);
mSkipRequest.DisconnectIfExists();
@ -270,19 +271,19 @@ MediaFormatReader::AsyncReadMetadata()
{
MOZ_ASSERT(OnTaskQueue());
nsRefPtr<MetadataPromise> p = mMetadataPromise.Ensure(__func__);
if (mInitDone) {
// We are returning from dormant.
if (!EnsureDecodersSetup()) {
return MetadataPromise::CreateAndReject(ReadMetadataFailureReason::METADATA_ERROR, __func__);
if (!EnsureDecodersCreated()) {
mMetadataPromise.Reject(ReadMetadataFailureReason::METADATA_ERROR, __func__);
return p;
}
nsRefPtr<MetadataHolder> metadata = new MetadataHolder();
metadata->mInfo = mInfo;
metadata->mTags = nullptr;
return MetadataPromise::CreateAndResolve(metadata, __func__);
MOZ_ASSERT(!mDecodersInitRequest.Exists());
EnsureDecodersInitialized();
return p;
}
nsRefPtr<MetadataPromise> p = mMetadataPromise.Ensure(__func__);
mDemuxerInitRequest.Begin(mDemuxer->Init()
->Then(OwnerThread(), __func__, this,
&MediaFormatReader::OnDemuxerInitDone,
@ -372,16 +373,24 @@ MediaFormatReader::OnDemuxerInitDone(nsresult)
MOZ_ASSERT(mAudioTrackDemuxer);
}
mInitDone = true;
if (!IsWaitingOnCDMResource() && !EnsureDecodersSetup()) {
mMetadataPromise.Reject(ReadMetadataFailureReason::METADATA_ERROR, __func__);
} else {
if (IsWaitingOnCDMResource()) {
// Decoders can't be created before the CDM resource is ready, so resolve
// mMetadataPromise here and let the MDSM wait for the CDM.
mInitDone = true;
nsRefPtr<MetadataHolder> metadata = new MetadataHolder();
metadata->mInfo = mInfo;
metadata->mTags = nullptr;
mMetadataPromise.Resolve(metadata, __func__);
return;
}
if (!EnsureDecodersCreated()) {
mMetadataPromise.Reject(ReadMetadataFailureReason::METADATA_ERROR, __func__);
return;
}
MOZ_ASSERT(!mDecodersInitRequest.Exists());
EnsureDecodersInitialized();
}
void
@ -396,10 +405,9 @@ MediaFormatReader::OnDemuxerInitFailed(DemuxerFailureReason aFailure)
}
bool
MediaFormatReader::EnsureDecodersSetup()
MediaFormatReader::EnsureDecodersCreated()
{
MOZ_ASSERT(OnTaskQueue());
MOZ_ASSERT(mInitDone);
if (!mPlatform) {
if (IsEncrypted()) {
@ -439,6 +447,7 @@ MediaFormatReader::EnsureDecodersSetup()
NS_ENSURE_TRUE(IsSupportedAudioMimeType(mInfo.mAudio.mMimeType),
false);
mAudio.mDecoderInitialized = false;
mAudio.mDecoder =
mPlatform->CreateDecoder(mAudio.mInfo ?
*mAudio.mInfo->GetAsAudioInfo() :
@ -446,14 +455,13 @@ MediaFormatReader::EnsureDecodersSetup()
mAudio.mTaskQueue,
mAudio.mCallback);
NS_ENSURE_TRUE(mAudio.mDecoder != nullptr, false);
nsresult rv = mAudio.mDecoder->Init();
NS_ENSURE_SUCCESS(rv, false);
}
if (HasVideo() && !mVideo.mDecoder) {
NS_ENSURE_TRUE(IsSupportedVideoMimeType(mInfo.mVideo.mMimeType),
false);
mVideo.mDecoderInitialized = false;
if (mSharedDecoderManager &&
mPlatform->SupportsSharedDecoders(mInfo.mVideo)) {
mVideo.mDecoder =
@ -476,13 +484,86 @@ MediaFormatReader::EnsureDecodersSetup()
mDecoder->GetImageContainer());
}
NS_ENSURE_TRUE(mVideo.mDecoder != nullptr, false);
nsresult rv = mVideo.mDecoder->Init();
NS_ENSURE_SUCCESS(rv, false);
}
return true;
}
bool
MediaFormatReader::EnsureDecodersInitialized()
{
MOZ_ASSERT(OnTaskQueue());
MOZ_ASSERT(mVideo.mDecoder || mAudio.mDecoder);
// DecodeDemuxedSamples() may call this function before mDecodersInitRequest
// has completed. It is OK to return false in that case because
// DecodeDemuxedSamples() will call ScheduleUpdate() again; this also avoids
// calling decoder->Init() multiple times.
if (mDecodersInitRequest.Exists()) {
return false;
}
nsTArray<nsRefPtr<MediaDataDecoder::InitPromise>> promises;
if (mVideo.mDecoder && !mVideo.mDecoderInitialized) {
promises.AppendElement(mVideo.mDecoder->Init());
}
if (mAudio.mDecoder && !mAudio.mDecoderInitialized) {
promises.AppendElement(mAudio.mDecoder->Init());
}
if (promises.Length()) {
mDecodersInitRequest.Begin(MediaDataDecoder::InitPromise::All(OwnerThread(), promises)
->Then(OwnerThread(), __func__, this,
&MediaFormatReader::OnDecoderInitDone,
&MediaFormatReader::OnDecoderInitFailed));
}
LOG("Init decoders: audio: %p, audio init: %d, video: %p, video init: %d",
mAudio.mDecoder.get(), mAudio.mDecoderInitialized,
mVideo.mDecoder.get(), mVideo.mDecoderInitialized);
// Return false if any decoder is still being initialized.
return !promises.Length();
}
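
For reference, InitPromise::All() as used above produces a single promise that resolves with an nsTArray of every per-decoder resolve value once all of them have resolved, and rejects as soon as any of them rejects; that is why OnDecoderInitDone() below takes a const nsTArray<TrackType>&. A small hedged sketch of that aggregation in isolation (WaitForDecoders is a hypothetical helper, assuming a Gecko build):

// Hedged sketch: aggregate two decoder Init() promises (hypothetical helper).
#include "PlatformDecoderModule.h"
#include "mozilla/MozPromise.h"
#include "mozilla/AbstractThread.h"  // include path assumed

static void
WaitForDecoders(mozilla::MediaDataDecoder* aAudio,
                mozilla::MediaDataDecoder* aVideo,
                mozilla::AbstractThread* aOwnerThread)
{
  typedef mozilla::MediaDataDecoder::InitPromise InitPromise;

  nsTArray<nsRefPtr<InitPromise>> promises;
  promises.AppendElement(aAudio->Init());   // resolves with kAudioTrack
  promises.AppendElement(aVideo->Init());   // resolves with kVideoTrack

  InitPromise::All(aOwnerThread, promises)
    ->Then(aOwnerThread, __func__,
      [](const nsTArray<mozilla::TrackInfo::TrackType>& aTypes) {
        // All decoders are initialized; aTypes holds each resolved track type.
      },
      [](mozilla::MediaDataDecoder::DecoderFailureReason aReason) {
        // At least one decoder failed; the first rejection wins.
      });
}
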
void
MediaFormatReader::OnDecoderInitDone(const nsTArray<TrackType>& aTrackTypes)
{
MOZ_ASSERT(OnTaskQueue());
mDecodersInitRequest.Complete();
for (const auto& track : aTrackTypes) {
auto& decoder = GetDecoderData(track);
decoder.mDecoderInitialized = true;
ScheduleUpdate(track);
}
if (!mMetadataPromise.IsEmpty()) {
mInitDone = true;
nsRefPtr<MetadataHolder> metadata = new MetadataHolder();
metadata->mInfo = mInfo;
metadata->mTags = nullptr;
mMetadataPromise.Resolve(metadata, __func__);
}
}
void
MediaFormatReader::OnDecoderInitFailed(MediaDataDecoder::DecoderFailureReason aReason)
{
MOZ_ASSERT(OnTaskQueue());
mDecodersInitRequest.Complete();
NS_WARNING("Failed to init decoder");
mMetadataPromise.RejectIfExists(ReadMetadataFailureReason::METADATA_ERROR, __func__);
NotifyError(TrackType::kAudioTrack);
NotifyError(TrackType::kVideoTrack);
}
void
MediaFormatReader::ReadUpdatedMetadata(MediaInfo* aInfo)
{
@ -509,7 +590,7 @@ MediaFormatReader::DisableHardwareAcceleration()
Flush(TrackInfo::kVideoTrack);
mVideo.mDecoder->Shutdown();
mVideo.mDecoder = nullptr;
if (!EnsureDecodersSetup()) {
if (!EnsureDecodersCreated()) {
LOG("Unable to re-create decoder, aborting");
NotifyError(TrackInfo::kVideoTrack);
return;
@ -559,13 +640,6 @@ MediaFormatReader::RequestVideoData(bool aSkipToNextKeyframe,
return VideoDataPromise::CreateAndReject(CANCELED, __func__);
}
if (!EnsureDecodersSetup()) {
NS_WARNING("Error constructing decoders");
return VideoDataPromise::CreateAndReject(DECODE_ERROR, __func__);
}
MOZ_ASSERT(HasVideo() && mPlatform && mVideo.mDecoder);
mVideo.mForceDecodeAhead = aForceDecodeAhead;
media::TimeUnit timeThreshold{media::TimeUnit::FromMicroseconds(aTimeThreshold)};
if (ShouldSkip(aSkipToNextKeyframe, timeThreshold)) {
@ -657,13 +731,9 @@ MediaFormatReader::RequestAudioData()
return AudioDataPromise::CreateAndReject(CANCELED, __func__);
}
if (!EnsureDecodersSetup()) {
NS_WARNING("Error constructing decoders");
return AudioDataPromise::CreateAndReject(DECODE_ERROR, __func__);
}
nsRefPtr<AudioDataPromise> p = mAudio.mPromise.Ensure(__func__);
ScheduleUpdate(TrackInfo::kAudioTrack);
ScheduleUpdate(TrackType::kAudioTrack);
return p;
}
@ -877,6 +947,17 @@ MediaFormatReader::DecodeDemuxedSamples(TrackType aTrack,
return;
}
if (!EnsureDecodersCreated()) {
NS_WARNING("Error constructing decoders");
NotifyError(aTrack);
return;
}
if (!EnsureDecodersInitialized()) {
ScheduleUpdate(aTrack);
return;
}
LOGV("Giving %s input to decoder", TrackTypeToStr(aTrack));
// Decode all our demuxed frames.
@ -912,15 +993,9 @@ MediaFormatReader::DecodeDemuxedSamples(TrackType aTrack,
Flush(aTrack);
decoder.mDecoder->Shutdown();
decoder.mDecoder = nullptr;
if (!EnsureDecodersSetup()) {
LOG("Unable to re-create decoder, aborting");
NotifyError(aTrack);
return;
}
LOGV("%s decoder:%p created for sid:%u",
TrackTypeToStr(aTrack), decoder.mDecoder.get(), info->GetID());
if (sample->mKeyframe) {
decoder.mQueuedSamples.MoveElementsFrom(samples);
ScheduleUpdate(aTrack);
} else {
MOZ_ASSERT(decoder.mTimeThreshold.isNothing());
LOG("Stream change occurred on a non-keyframe. Seeking to:%lld",
@ -953,8 +1028,8 @@ MediaFormatReader::DecodeDemuxedSamples(TrackType aTrack,
}
decoder.mTimeThreshold.reset();
}));
return;
}
return;
}
LOGV("Input:%lld (dts:%lld kf:%d)",

View File

@ -105,7 +105,10 @@ private:
void NotifyDemuxer(uint32_t aLength, int64_t aOffset);
void ReturnOutput(MediaData* aData, TrackType aTrack);
bool EnsureDecodersSetup();
bool EnsureDecodersCreated();
// Returns true when all decoders are initialized; false while initialization
// is still pending.
bool EnsureDecodersInitialized();
// Enqueues a task to call Update(aTrack) on the decoder task queue.
// Lock for corresponding track must be held.
@ -194,6 +197,7 @@ private:
, mWaitingForData(false)
, mReceivedNewData(false)
, mDiscontinuity(true)
, mDecoderInitialized(false)
, mOutputRequested(false)
, mInputExhausted(false)
, mError(false)
@ -242,6 +246,8 @@ private:
}
// MediaDataDecoder handler's variables.
// False when the decoder is created; true once the decoder's Init() promise resolves.
bool mDecoderInitialized;
bool mOutputRequested;
bool mInputExhausted;
bool mError;
@ -337,6 +343,9 @@ private:
DecoderData& GetDecoderData(TrackType aTrack);
void OnDecoderInitDone(const nsTArray<TrackType>& aTrackTypes);
void OnDecoderInitFailed(MediaDataDecoder::DecoderFailureReason aReason);
// Demuxer objects.
void OnDemuxerInitDone(nsresult);
void OnDemuxerInitFailed(DemuxerFailureReason aFailure);
@ -411,6 +420,9 @@ private:
Maybe<media::TimeUnit> mPendingSeekTime;
MozPromiseHolder<SeekPromise> mSeekPromise;
// Pending decoders initialization.
MozPromiseRequestHolder<MediaDataDecoder::InitPromise::AllPromiseType> mDecodersInitRequest;
#ifdef MOZ_EME
nsRefPtr<CDMProxy> mCDMProxy;
#endif
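
The header above pairs MozPromiseHolder members (producer side, e.g. mMetadataPromise and mSeekPromise) with a MozPromiseRequestHolder (consumer side, mDecodersInitRequest). Below is a hedged sketch of the producer half, assuming a Gecko build and the MetadataPromise typedef from MediaDecoderReader.h; DemoMetadataSource is hypothetical, and it mirrors how AsyncReadMetadata() hands out mMetadataPromise and how Shutdown() rejects anything still pending.

// Hedged sketch of the MozPromiseHolder (producer) side; DemoMetadataSource
// is hypothetical.
#include "MediaDecoderReader.h"
#include "mozilla/MozPromise.h"

namespace mozilla {

class DemoMetadataSource {
public:
  // Hand a promise to the caller; resolve or reject it later, exactly once.
  nsRefPtr<MediaDecoderReader::MetadataPromise> RequestMetadata()
  {
    return mMetadataPromise.Ensure(__func__);
  }

  void OnMetadataReady(MetadataHolder* aMetadata)
  {
    mMetadataPromise.ResolveIfExists(aMetadata, __func__);
  }

  void Shutdown()
  {
    // Mirrors MediaFormatReader::Shutdown(): reject any outstanding request.
    mMetadataPromise.RejectIfExists(ReadMetadataFailureReason::METADATA_ERROR,
                                    __func__);
  }

private:
  MozPromiseHolder<MediaDecoderReader::MetadataPromise> mMetadataPromise;
};

} // namespace mozilla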

View File

@ -278,8 +278,6 @@ CreateTestH264Decoder(layers::LayersBackend aBackend,
if (!decoder) {
return nullptr;
}
nsresult rv = decoder->Init();
NS_ENSURE_SUCCESS(rv, nullptr);
return decoder.forget();
}
@ -293,7 +291,6 @@ MP4Decoder::IsVideoAccelerated(layers::LayersBackend aBackend)
return false;
}
bool result = decoder->IsHardwareAccelerated();
decoder->Shutdown();
return result;
}
@ -336,8 +333,6 @@ CreateTestAACDecoder(AudioInfo& aConfig)
if (!decoder) {
return nullptr;
}
nsresult rv = decoder->Init();
NS_ENSURE_SUCCESS(rv, nullptr);
return decoder.forget();
}
@ -376,7 +371,6 @@ MP4Decoder::CanCreateAACDecoder()
MOZ_ARRAY_LENGTH(sTestAACExtraData));
nsRefPtr<MediaDataDecoder> decoder(CreateTestAACDecoder(config));
if (decoder) {
decoder->Shutdown();
result = true;
}
haveCachedResult = true;

View File

@ -8,6 +8,7 @@
#define PlatformDecoderModule_h_
#include "MediaDecoderReader.h"
#include "mozilla/MozPromise.h"
#include "mozilla/layers/LayersTypes.h"
#include "nsTArray.h"
#include "mozilla/RefPtr.h"
@ -207,16 +208,24 @@ protected:
virtual ~MediaDataDecoder() {};
public:
enum DecoderFailureReason {
INIT_ERROR,
CANCELED
};
typedef TrackInfo::TrackType TrackType;
typedef MozPromise<TrackType, DecoderFailureReason, /* IsExclusive = */ true> InitPromise;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaDataDecoder)
// Initialize the decoder. The decoder should be ready to decode after
// this returns. The decoder should do any initialization here, rather
// Initialize the decoder. The decoder should be ready to decode once
// the promise resolves. The decoder should do any initialization here, rather
// than in its constructor or PlatformDecoderModule::Create*Decoder(),
// so that if the MP4Reader needs to shutdown during initialization,
// it can call Shutdown() to cancel this operation. Any initialization
// that requires blocking the calling thread in this function *must*
// be done here so that it can be canceled by calling Shutdown()!
virtual nsresult Init() = 0;
virtual nsRefPtr<InitPromise> Init() = 0;
// Inserts a sample into the decoder's decode pipeline.
virtual nsresult Input(MediaRawData* aSample) = 0;
@ -251,6 +260,8 @@ public:
virtual nsresult Shutdown() = 0;
// Called from the state machine task queue or main thread.
// The decoder must be able to determine whether hardware acceleration is
// supported right after it is created; callers do not need to call Init()
// before calling this function.
virtual bool IsHardwareAccelerated() const { return false; }
// ConfigurationChanged will be called to inform the video or audio decoder
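
Most of the decoder backends converted below still perform their setup synchronously inside Init() and simply wrap the result in an already-settled promise. A hedged sketch of that shape follows, assuming a Gecko build; DemoDecoder and SetupCodec() are hypothetical.

// Hedged sketch of a MediaDataDecoder implementing the new Init() contract;
// DemoDecoder and SetupCodec() are hypothetical.
#include "PlatformDecoderModule.h"

namespace mozilla {

class DemoDecoder : public MediaDataDecoder {
public:
  virtual nsRefPtr<InitPromise> Init() override
  {
    if (!SetupCodec()) {   // hypothetical synchronous setup step
      return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR,
                                          __func__);
    }
    // Resolve with the track type this decoder handles.
    return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
  }

  virtual nsresult Input(MediaRawData* aSample) override { return NS_OK; }
  virtual nsresult Flush() override { return NS_OK; }
  virtual nsresult Drain() override { return NS_OK; }
  virtual nsresult Shutdown() override { return NS_OK; }

private:
  bool SetupCodec() { return true; }
};

} // namespace mozilla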

View File

@ -73,6 +73,7 @@ SharedDecoderManager::SharedDecoderManager()
: mTaskQueue(new FlushableTaskQueue(GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER)))
, mActiveProxy(nullptr)
, mActiveCallback(nullptr)
, mInit(false)
, mWaitForInternalDrain(false)
, mMonitor("SharedDecoderManager")
, mDecoderReleasedResources(false)
@ -111,11 +112,7 @@ SharedDecoderManager::CreateVideoDecoder(
mPDM = nullptr;
return nullptr;
}
nsresult rv = mDecoder->Init();
if (NS_FAILED(rv)) {
mDecoder = nullptr;
return nullptr;
}
mPDM = aPDM;
}
@ -143,8 +140,8 @@ SharedDecoderManager::Recreate(const VideoInfo& aConfig)
if (!mDecoder) {
return false;
}
nsresult rv = mDecoder->Init();
return rv == NS_OK;
mInit = false;
return true;
}
void
@ -181,6 +178,35 @@ SharedDecoderManager::SetIdle(MediaDataDecoder* aProxy)
}
}
nsRefPtr<MediaDataDecoder::InitPromise>
SharedDecoderManager::InitDecoder()
{
if (!mInit && mDecoder) {
MOZ_ASSERT(mCallback->OnReaderTaskQueue());
nsRefPtr<SharedDecoderManager> self = this;
nsRefPtr<MediaDataDecoder::InitPromise> p = mDecoderInitPromise.Ensure(__func__);
// mTaskQueue is a flushable task queue, which can't be used as a MozPromise
// target, so we use the current AbstractThread instead. The MOZ_ASSERT above
// ensures we are running on an AbstractThread, so GetCurrent() won't return
// nullptr.
mDecoderInitPromiseRequest.Begin(
mDecoder->Init()->Then(AbstractThread::GetCurrent(), __func__,
[self] (TrackInfo::TrackType aType) -> void {
self->mDecoderInitPromiseRequest.Complete();
self->mInit = true;
self->mDecoderInitPromise.ResolveIfExists(aType, __func__);
},
[self] (MediaDataDecoder::DecoderFailureReason aReason) -> void {
self->mDecoderInitPromiseRequest.Complete();
self->mDecoderInitPromise.RejectIfExists(aReason, __func__);
}));
return p;
}
return MediaDataDecoder::InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
}
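
The lambda form of Then() used here captures a strong nsRefPtr to the manager so the object stays alive until the resolve or reject handler runs, and each handler calls Complete() on the request holder before settling the promise holder. A condensed hedged sketch of that idiom (DemoSharer is hypothetical, assuming a Gecko build; as with SharedDecoderProxy, only one caller is expected to wait at a time since InitPromise is exclusive):

// Hedged sketch of the "single underlying Init, shared by callers" idiom above;
// DemoSharer is hypothetical.
#include "PlatformDecoderModule.h"
#include "mozilla/MozPromise.h"

namespace mozilla {

class DemoSharer {
public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DemoSharer)

  typedef MediaDataDecoder::InitPromise InitPromise;

  nsRefPtr<InitPromise> EnsureInit(MediaDataDecoder* aDecoder)
  {
    if (mInit || !aDecoder) {
      // Already initialized (or nothing to do): resolve immediately.
      return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
    }
    nsRefPtr<InitPromise> p = mPromise.Ensure(__func__);
    nsRefPtr<DemoSharer> self = this;  // keep alive until a handler runs
    mRequest.Begin(aDecoder->Init()->Then(AbstractThread::GetCurrent(), __func__,
      [self] (TrackInfo::TrackType aType) {
        self->mRequest.Complete();
        self->mInit = true;
        self->mPromise.ResolveIfExists(aType, __func__);
      },
      [self] (MediaDataDecoder::DecoderFailureReason aReason) {
        self->mRequest.Complete();
        self->mPromise.RejectIfExists(aReason, __func__);
      }));
    return p;
  }

private:
  ~DemoSharer() {}

  bool mInit = false;
  MozPromiseHolder<InitPromise> mPromise;
  MozPromiseRequestHolder<InitPromise> mRequest;
};

} // namespace mozilla
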
void
SharedDecoderManager::DrainComplete()
{
@ -211,6 +237,7 @@ SharedDecoderManager::Shutdown()
mTaskQueue->AwaitShutdownAndIdle();
mTaskQueue = nullptr;
}
mDecoderInitPromiseRequest.DisconnectIfExists();
}
SharedDecoderProxy::SharedDecoderProxy(SharedDecoderManager* aManager,
@ -225,10 +252,14 @@ SharedDecoderProxy::~SharedDecoderProxy()
Shutdown();
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
SharedDecoderProxy::Init()
{
return NS_OK;
if (mManager->mActiveProxy != this) {
mManager->Select(this);
}
return mManager->InitDecoder();
}
nsresult

View File

@ -48,6 +48,8 @@ private:
virtual ~SharedDecoderManager();
void DrainComplete();
nsRefPtr<MediaDataDecoder::InitPromise> InitDecoder();
nsRefPtr<PlatformDecoderModule> mPDM;
nsRefPtr<MediaDataDecoder> mDecoder;
layers::LayersBackend mLayersBackend;
@ -56,6 +58,9 @@ private:
SharedDecoderProxy* mActiveProxy;
MediaDataDecoderCallback* mActiveCallback;
nsAutoPtr<MediaDataDecoderCallback> mCallback;
MozPromiseHolder<MediaDataDecoder::InitPromise> mDecoderInitPromise;
MozPromiseRequestHolder<MediaDataDecoder::InitPromise> mDecoderInitPromiseRequest;
bool mInit;
// access protected by mMonitor
bool mWaitForInternalDrain;
Monitor mMonitor;
@ -69,7 +74,7 @@ public:
MediaDataDecoderCallback* aCallback);
virtual ~SharedDecoderProxy();
virtual nsresult Init() override;
virtual nsRefPtr<MediaDataDecoder::InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;

View File

@ -25,15 +25,17 @@ public:
BlankMediaDataDecoder(BlankMediaDataCreator* aCreator,
FlushableTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback)
MediaDataDecoderCallback* aCallback,
TrackInfo::TrackType aType)
: mCreator(aCreator)
, mTaskQueue(aTaskQueue)
, mCallback(aCallback)
, mType(aType)
{
}
virtual nsresult Init() override {
return NS_OK;
virtual nsRefPtr<InitPromise> Init() override {
return InitPromise::CreateAndResolve(mType, __func__);
}
virtual nsresult Shutdown() override {
@ -89,6 +91,7 @@ private:
nsAutoPtr<BlankMediaDataCreator> mCreator;
RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
TrackInfo::TrackType mType;
};
class BlankVideoDataCreator {
@ -221,7 +224,8 @@ public:
nsRefPtr<MediaDataDecoder> decoder =
new BlankMediaDataDecoder<BlankVideoDataCreator>(creator,
aVideoTaskQueue,
aCallback);
aCallback,
TrackInfo::kVideoTrack);
return decoder.forget();
}
@ -236,7 +240,8 @@ public:
nsRefPtr<MediaDataDecoder> decoder =
new BlankMediaDataDecoder<BlankAudioDataCreator>(creator,
aAudioTaskQueue,
aCallback);
aCallback,
TrackInfo::kAudioTrack);
return decoder.forget();
}

View File

@ -47,19 +47,19 @@ OpusDataDecoder::Shutdown()
return NS_OK;
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
OpusDataDecoder::Init()
{
size_t length = mInfo.mCodecSpecificConfig->Length();
uint8_t *p = mInfo.mCodecSpecificConfig->Elements();
if (length < sizeof(uint64_t)) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
int64_t codecDelay = BigEndian::readUint64(p);
length -= sizeof(uint64_t);
p += sizeof(uint64_t);
if (NS_FAILED(DecodeHeader(p, length))) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
int r;
@ -75,7 +75,7 @@ OpusDataDecoder::Init()
if (codecDelay != FramesToUsecs(mOpusParser->mPreSkip,
mOpusParser->mRate).value()) {
NS_WARNING("Invalid Opus header: CodecDelay and pre-skip do not match!");
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
if (mInfo.mRate != (uint32_t)mOpusParser->mRate) {
@ -85,7 +85,8 @@ OpusDataDecoder::Init()
NS_WARNING("Invalid Opus header: container and codec channels do not match!");
}
return r == OPUS_OK ? NS_OK : NS_ERROR_FAILURE;
return r == OPUS_OK ? InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__)
: InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult

View File

@ -21,7 +21,7 @@ public:
MediaDataDecoderCallback* aCallback);
~OpusDataDecoder();
nsresult Init() override;
nsRefPtr<InitPromise> Init() override;
nsresult Input(MediaRawData* aSample) override;
nsresult Flush() override;
nsresult Drain() override;

View File

@ -55,7 +55,7 @@ VPXDecoder::Shutdown()
return NS_OK;
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
VPXDecoder::Init()
{
vpx_codec_iface_t* dx = nullptr;
@ -65,9 +65,9 @@ VPXDecoder::Init()
dx = vpx_codec_vp9_dx();
}
if (!dx || vpx_codec_dec_init(&mVPX, dx, nullptr, 0)) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
return NS_OK;
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
}
nsresult

View File

@ -28,7 +28,7 @@ public:
~VPXDecoder();
nsresult Init() override;
nsRefPtr<InitPromise> Init() override;
nsresult Input(MediaRawData* aSample) override;
nsresult Flush() override;
nsresult Drain() override;

View File

@ -63,7 +63,7 @@ VorbisDataDecoder::Shutdown()
return NS_OK;
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
VorbisDataDecoder::Init()
{
vorbis_info_init(&mVorbisInfo);
@ -75,17 +75,17 @@ VorbisDataDecoder::Init()
uint8_t *p = mInfo.mCodecSpecificConfig->Elements();
for(int i = 0; i < 3; i++) {
if (available < 2) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
available -= 2;
size_t length = BigEndian::readUint16(p);
p += 2;
if (available < length) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
available -= length;
if (NS_FAILED(DecodeHeader((const unsigned char*)p, length))) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
p += length;
}
@ -94,12 +94,12 @@ VorbisDataDecoder::Init()
int r = vorbis_synthesis_init(&mVorbisDsp, &mVorbisInfo);
if (r) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
r = vorbis_block_init(&mVorbisDsp, &mVorbisBlock);
if (r) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
if (mInfo.mRate != (uint32_t)mVorbisDsp.vi->rate) {
@ -111,7 +111,7 @@ VorbisDataDecoder::Init()
("Invalid Vorbis header: container and codec channels do not match!"));
}
return NS_OK;
return InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__);
}
nsresult

View File

@ -24,7 +24,7 @@ public:
MediaDataDecoderCallback* aCallback);
~VorbisDataDecoder();
nsresult Init() override;
nsRefPtr<InitPromise> Init() override;
nsresult Input(MediaRawData* aSample) override;
nsresult Flush() override;
nsresult Drain() override;

View File

@ -45,7 +45,7 @@ public:
{
}
virtual nsresult Init() override {
virtual nsRefPtr<InitPromise> Init() override {
MOZ_ASSERT(!mIsShutdown);
return mDecoder->Init();
}

View File

@ -167,7 +167,7 @@ GMPAudioDecoder::GMPInitDone(GMPAudioDecoderProxy* aGMP)
}
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
GMPAudioDecoder::Init()
{
MOZ_ASSERT(IsOnGMPThread());
@ -188,7 +188,8 @@ GMPAudioDecoder::Init()
NS_ProcessNextEvent(gmpThread, true);
}
return mGMP ? NS_OK : NS_ERROR_FAILURE;
return mGMP ? InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__)
: InitPromise::CreateAndReject(MediaDataDecoder::DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult

View File

@ -69,7 +69,7 @@ public:
{
}
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;

View File

@ -211,7 +211,7 @@ GMPVideoDecoder::GMPInitDone(GMPVideoDecoderProxy* aGMP, GMPVideoHost* aHost)
}
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
GMPVideoDecoder::Init()
{
MOZ_ASSERT(IsOnGMPThread());
@ -232,7 +232,8 @@ GMPVideoDecoder::Init()
NS_ProcessNextEvent(gmpThread, true);
}
return mGMP ? NS_OK : NS_ERROR_FAILURE;
return mGMP ? InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__)
: InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult

View File

@ -84,7 +84,7 @@ public:
{
}
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;

View File

@ -21,16 +21,21 @@ MediaDataDecoderCallbackProxy::FlushComplete()
mProxyDecoder->FlushComplete();
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
MediaDataDecoderProxy::InternalInit()
{
MOZ_ASSERT(!mIsShutdown);
return mProxyDecoder->Init();
}
nsRefPtr<MediaDataDecoder::InitPromise>
MediaDataDecoderProxy::Init()
{
MOZ_ASSERT(!mIsShutdown);
nsRefPtr<InitTask> task(new InitTask(mProxyDecoder));
nsresult rv = mProxyThread->Dispatch(task, NS_DISPATCH_SYNC);
NS_ENSURE_SUCCESS(rv, rv);
NS_ENSURE_SUCCESS(task->Result(), task->Result());
return NS_OK;
return ProxyMediaCall(mProxyThreadWrapper, this, __func__,
&MediaDataDecoderProxy::InternalInit);
}
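
ProxyMediaCall() as used above dispatches the named method to the target AbstractThread and forwards the promise that method returns, so Init() no longer needs a blocking NS_DISPATCH_SYNC round trip. A hedged one-method sketch mirroring that call shape (DemoProxy is hypothetical; the declaring header for ProxyMediaCall is assumed to be the same one MediaDataDecoderProxy uses in this tree):

// Hedged sketch mirroring the ProxyMediaCall() shape above; DemoProxy is
// hypothetical and assumes a Gecko build.
#include "PlatformDecoderModule.h"
#include "mozilla/MozPromise.h"  // ProxyMediaCall assumed available via this tree

namespace mozilla {

class DemoProxy {
public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DemoProxy)

  typedef MediaDataDecoder::InitPromise InitPromise;

  DemoProxy(MediaDataDecoder* aWrapped, AbstractThread* aTargetThread)
    : mWrapped(aWrapped), mTargetThread(aTargetThread) {}

  nsRefPtr<InitPromise> Init()
  {
    // Runs InternalInit() on mTargetThread and forwards the promise it returns.
    return ProxyMediaCall(mTargetThread, this, __func__, &DemoProxy::InternalInit);
  }

private:
  ~DemoProxy() {}

  nsRefPtr<InitPromise> InternalInit() { return mWrapped->Init(); }

  nsRefPtr<MediaDataDecoder> mWrapped;
  nsRefPtr<AbstractThread> mTargetThread;
};

} // namespace mozilla
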
nsresult

View File

@ -32,30 +32,6 @@ private:
nsRefPtr<MediaRawData> mSample;
};
class InitTask : public nsRunnable {
public:
explicit InitTask(MediaDataDecoder* aDecoder)
: mDecoder(aDecoder)
, mResultValid(false)
{}
NS_IMETHOD Run() {
mResult = mDecoder->Init();
mResultValid = true;
return NS_OK;
}
nsresult Result() {
MOZ_ASSERT(mResultValid);
return mResult;
}
private:
MediaDataDecoder* mDecoder;
nsresult mResult;
bool mResultValid;
};
template<typename T>
class Condition {
public:
@ -132,6 +108,7 @@ public:
, mIsShutdown(false)
#endif
{
mProxyThreadWrapper = CreateXPCOMAbstractThreadWrapper(aProxyThread, false);
}
// Ideally, this would return a regular MediaDataDecoderCallback pointer
@ -155,7 +132,7 @@ public:
// Init and Shutdown run synchronously on the proxy thread; all others run
// asynchronously and are responded to via the MediaDataDecoderCallback.
// Note: the nsresults returned by the proxied decoder are lost.
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;
@ -165,6 +142,8 @@ public:
void FlushComplete();
private:
nsRefPtr<InitPromise> InternalInit();
#ifdef DEBUG
bool IsOnProxyThread() {
return NS_GetCurrentThread() == mProxyThread;
@ -176,6 +155,7 @@ private:
nsRefPtr<MediaDataDecoder> mProxyDecoder;
nsCOMPtr<nsIThread> mProxyThread;
nsRefPtr<AbstractThread> mProxyThreadWrapper;
MediaDataDecoderCallbackProxy mProxyCallback;

View File

@ -53,14 +53,18 @@ public:
}
nsresult Init() override {
nsRefPtr<InitPromise> Init() override {
mSurfaceTexture = AndroidSurfaceTexture::Create();
if (!mSurfaceTexture) {
NS_WARNING("Failed to create SurfaceTexture for video decode\n");
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
return InitDecoder(mSurfaceTexture->JavaSurface());
if (NS_FAILED(InitDecoder(mSurfaceTexture->JavaSurface()))) {
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
}
void Cleanup() override {
@ -349,9 +353,17 @@ MediaCodecDataDecoder::~MediaCodecDataDecoder()
Shutdown();
}
nsresult MediaCodecDataDecoder::Init()
nsRefPtr<MediaDataDecoder::InitPromise> MediaCodecDataDecoder::Init()
{
return InitDecoder(nullptr);
nsresult rv = InitDecoder(nullptr);
TrackInfo::TrackType type =
(mType == MediaData::AUDIO_DATA ? TrackInfo::TrackType::kAudioTrack
: TrackInfo::TrackType::kVideoTrack);
return NS_SUCCEEDED(rv) ?
InitPromise::CreateAndResolve(type, __func__) :
InitPromise::CreateAndReject(MediaDataDecoder::DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult MediaCodecDataDecoder::InitDecoder(Surface::Param aSurface)

View File

@ -52,7 +52,7 @@ public:
virtual ~MediaCodecDataDecoder();
virtual nsresult Init() override;
virtual nsRefPtr<MediaDataDecoder::InitPromise> Init() override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;
virtual nsresult Shutdown() override;

View File

@ -50,14 +50,15 @@ AppleATDecoder::~AppleATDecoder()
MOZ_ASSERT(!mConverter);
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
AppleATDecoder::Init()
{
if (!mFormatID) {
NS_ERROR("Non recognised format");
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
return NS_OK;
return InitPromise::CreateAndResolve(TrackType::kAudioTrack, __func__);
}
nsresult

View File

@ -25,7 +25,7 @@ public:
MediaDataDecoderCallback* aCallback);
virtual ~AppleATDecoder();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;

View File

@ -73,19 +73,10 @@ AppleVDADecoder::~AppleVDADecoder()
MOZ_COUNT_DTOR(AppleVDADecoder);
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
AppleVDADecoder::Init()
{
if (!gfxPlatform::GetPlatform()->CanUseHardwareVideoDecoding()) {
// This GPU is blacklisted for hardware decoding.
return NS_ERROR_FAILURE;
}
if (mDecoder) {
return NS_OK;
}
nsresult rv = InitializeSession();
return rv;
return InitPromise::CreateAndResolve(TrackType::kVideoTrack, __func__);
}
nsresult
@ -580,11 +571,18 @@ AppleVDADecoder::CreateVDADecoder(
MediaDataDecoderCallback* aCallback,
layers::ImageContainer* aImageContainer)
{
nsRefPtr<AppleVDADecoder> decoder =
new AppleVDADecoder(aConfig, aVideoTaskQueue, aCallback, aImageContainer);
if (NS_FAILED(decoder->Init())) {
if (!gfxPlatform::GetPlatform()->CanUseHardwareVideoDecoding()) {
// This GPU is blacklisted for hardware decoding.
return nullptr;
}
nsRefPtr<AppleVDADecoder> decoder =
new AppleVDADecoder(aConfig, aVideoTaskQueue, aCallback, aImageContainer);
if (NS_FAILED(decoder->InitializeSession())) {
return nullptr;
}
return decoder.forget();
}

View File

@ -70,7 +70,7 @@ public:
MediaDataDecoderCallback* aCallback,
layers::ImageContainer* aImageContainer);
virtual ~AppleVDADecoder();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;
@ -83,11 +83,15 @@ public:
nsresult OutputFrame(CVPixelBufferRef aImage,
nsAutoPtr<AppleFrameRef> aFrameRef);
// Method to set up the decompression session.
nsresult InitializeSession();
protected:
AppleFrameRef* CreateAppleFrameRef(const MediaRawData* aSample);
void DrainReorderedFrames();
void ClearReorderedFrames();
CFDictionaryRef CreateOutputConfiguration();
nsresult InitDecoder();
nsRefPtr<MediaByteBuffer> mExtraData;
nsRefPtr<FlushableTaskQueue> mTaskQueue;
@ -107,8 +111,6 @@ private:
// Method to pass a frame to VideoToolbox for decoding.
nsresult SubmitFrame(MediaRawData* aSample);
// Method to set up the decompression session.
nsresult InitializeSession();
CFDictionaryRef CreateDecoderSpecification();
};

View File

@ -51,11 +51,16 @@ AppleVTDecoder::~AppleVTDecoder()
MOZ_COUNT_DTOR(AppleVTDecoder);
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
AppleVTDecoder::Init()
{
nsresult rv = InitializeSession();
return rv;
if (NS_SUCCEEDED(rv)) {
return InitPromise::CreateAndResolve(TrackType::kVideoTrack, __func__);
}
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult

View File

@ -20,7 +20,7 @@ public:
MediaDataDecoderCallback* aCallback,
layers::ImageContainer* aImageContainer);
virtual ~AppleVTDecoder();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;

View File

@ -27,13 +27,13 @@ FFmpegAudioDecoder<LIBAV_VER>::FFmpegAudioDecoder(
mExtraData->AppendElements(*aConfig.mCodecSpecificConfig);
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
FFmpegAudioDecoder<LIBAV_VER>::Init()
{
nsresult rv = FFmpegDataDecoder::Init();
NS_ENSURE_SUCCESS(rv, rv);
nsresult rv = InitDecoder();
return NS_OK;
return rv == NS_OK ? InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__)
: InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
static AudioDataValue*

View File

@ -25,7 +25,7 @@ public:
const AudioInfo& aConfig);
virtual ~FFmpegAudioDecoder();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Drain() override;
static AVCodecID GetCodecId(const nsACString& aMimeType);

View File

@ -58,7 +58,7 @@ ChoosePixelFormat(AVCodecContext* aCodecContext, const PixelFormat* aFormats)
}
nsresult
FFmpegDataDecoder<LIBAV_VER>::Init()
FFmpegDataDecoder<LIBAV_VER>::InitDecoder()
{
StaticMutexAutoLock mon(sMonitor);

View File

@ -28,7 +28,7 @@ public:
static bool Link();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override = 0;
virtual nsresult Input(MediaRawData* aSample) override = 0;
virtual nsresult Flush() override;
virtual nsresult Drain() override = 0;
@ -36,6 +36,7 @@ public:
protected:
AVFrame* PrepareFrame();
nsresult InitDecoder();
FlushableTaskQueue* mTaskQueue;
AVCodecContext* mCodecContext;

View File

@ -39,16 +39,17 @@ FFmpegH264Decoder<LIBAV_VER>::FFmpegH264Decoder(
mExtraData->AppendElements(*aConfig.mExtraData);
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
FFmpegH264Decoder<LIBAV_VER>::Init()
{
nsresult rv = FFmpegDataDecoder::Init();
NS_ENSURE_SUCCESS(rv, rv);
if (NS_FAILED(InitDecoder())) {
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
mCodecContext->get_buffer = AllocateBufferCb;
mCodecContext->release_buffer = ReleaseBufferCb;
return NS_OK;
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
}
int64_t

View File

@ -36,7 +36,7 @@ public:
ImageContainer* aImageContainer);
virtual ~FFmpegH264Decoder();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Drain() override;
virtual nsresult Flush() override;

View File

@ -37,6 +37,8 @@ public:
virtual bool HasQueuedSample() override;
virtual TrackType GetTrackType() override { return TrackType::kAudioTrack; }
private:
nsresult CreateAudioData(int64_t aStreamOffset,
AudioData** aOutData);

View File

@ -37,7 +37,7 @@ GonkMediaDataDecoder::~GonkMediaDataDecoder()
MOZ_COUNT_DTOR(GonkMediaDataDecoder);
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
GonkMediaDataDecoder::Init()
{
sp<MediaCodecProxy> decoder;
@ -45,7 +45,7 @@ GonkMediaDataDecoder::Init()
mDecoder = decoder;
mDrainComplete = false;
return NS_OK;
return InitPromise::CreateAndResolve(mManager->GetTrackType(), __func__);
}
nsresult

View File

@ -18,6 +18,8 @@ class MediaRawData;
// Manage the data flow from inputting encoded data and outputting decode data.
class GonkDecoderManager {
public:
typedef TrackInfo::TrackType TrackType;
virtual ~GonkDecoderManager() {}
// Creates and initializes the GonkDecoder.
@ -42,6 +44,8 @@ public:
// True if sample is queued.
virtual bool HasQueuedSample() = 0;
virtual TrackType GetTrackType() = 0;
protected:
nsRefPtr<MediaByteBuffer> mCodecSpecificData;
@ -61,9 +65,9 @@ public:
~GonkMediaDataDecoder();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample);
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;

View File

@ -55,6 +55,8 @@ public:
virtual bool HasQueuedSample() override;
virtual TrackType GetTrackType() override { return TrackType::kVideoTrack; }
static void RecycleCallback(TextureClient* aClient, void* aClosure);
private:

View File

@ -31,6 +31,10 @@ public:
virtual void Shutdown() override;
virtual TrackInfo::TrackType GetType() override {
return TrackInfo::kAudioTrack;
}
private:
HRESULT UpdateOutputType();

View File

@ -8,6 +8,7 @@
#include "WMFDecoderModule.h"
#include "WMFVideoMFTManager.h"
#include "WMFAudioMFTManager.h"
#include "MFTDecoder.h"
#include "mozilla/Preferences.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/Services.h"
@ -97,13 +98,21 @@ WMFDecoderModule::CreateVideoDecoder(const VideoInfo& aConfig,
FlushableTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback)
{
nsAutoPtr<WMFVideoMFTManager> manager =
new WMFVideoMFTManager(aConfig,
aLayersBackend,
aImageContainer,
sDXVAEnabled && ShouldUseDXVA(aConfig));
nsRefPtr<MFTDecoder> mft = manager->Init();
if (!mft) {
return nullptr;
}
nsRefPtr<MediaDataDecoder> decoder =
new WMFMediaDataDecoder(new WMFVideoMFTManager(aConfig,
aLayersBackend,
aImageContainer,
sDXVAEnabled && ShouldUseDXVA(aConfig)),
aVideoTaskQueue,
aCallback);
new WMFMediaDataDecoder(manager.forget(), mft, aVideoTaskQueue, aCallback);
return decoder.forget();
}
@ -112,10 +121,15 @@ WMFDecoderModule::CreateAudioDecoder(const AudioInfo& aConfig,
FlushableTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback)
{
nsAutoPtr<WMFAudioMFTManager> manager = new WMFAudioMFTManager(aConfig);
nsRefPtr<MFTDecoder> mft = manager->Init();
if (!mft) {
return nullptr;
}
nsRefPtr<MediaDataDecoder> decoder =
new WMFMediaDataDecoder(new WMFAudioMFTManager(aConfig),
aAudioTaskQueue,
aCallback);
new WMFMediaDataDecoder(manager.forget(), mft, aAudioTaskQueue, aCallback);
return decoder.forget();
}

View File

@ -18,10 +18,12 @@ PRLogModuleInfo* GetDemuxerLog();
namespace mozilla {
WMFMediaDataDecoder::WMFMediaDataDecoder(MFTManager* aMFTManager,
MFTDecoder* aDecoder,
FlushableTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback)
: mTaskQueue(aTaskQueue)
, mCallback(aCallback)
, mDecoder(aDecoder)
, mMFTManager(aMFTManager)
, mMonitor("WMFMediaDataDecoder")
, mIsFlushing(false)
@ -33,16 +35,14 @@ WMFMediaDataDecoder::~WMFMediaDataDecoder()
{
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
WMFMediaDataDecoder::Init()
{
MOZ_ASSERT(!mDecoder);
MOZ_ASSERT(!mIsShutDown);
mDecoder = mMFTManager->Init();
NS_ENSURE_TRUE(mDecoder, NS_ERROR_FAILURE);
return NS_OK;
return mDecoder ?
InitPromise::CreateAndResolve(mMFTManager->GetType(), __func__) :
InitPromise::CreateAndReject(MediaDataDecoder::DecoderFailureReason::INIT_ERROR, __func__);
}
// A single telemetry sample is reported for each MediaDataDecoder object

View File

@ -45,6 +45,8 @@ public:
virtual bool IsHardwareAccelerated() const { return false; }
virtual TrackInfo::TrackType GetType() = 0;
};
// Decodes audio and video using Windows Media Foundation. Samples are decoded
@ -55,11 +57,12 @@ public:
class WMFMediaDataDecoder : public MediaDataDecoder {
public:
WMFMediaDataDecoder(MFTManager* aOutputSource,
MFTDecoder* aDecoder,
FlushableTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback);
~WMFMediaDataDecoder();
virtual nsresult Init() override;
virtual nsRefPtr<MediaDataDecoder::InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample);

View File

@ -36,6 +36,10 @@ public:
virtual bool IsHardwareAccelerated() const override;
virtual TrackInfo::TrackType GetType() override {
return TrackInfo::kVideoTrack;
}
private:
bool InitializeDXVA(bool aForceD3D9);

View File

@ -29,6 +29,7 @@ H264Converter::H264Converter(PlatformDecoderModule* aPDM,
, mCallback(aCallback)
, mDecoder(nullptr)
, mNeedAVCC(aPDM->DecoderNeedsConversion(aConfig) == PlatformDecoderModule::kNeedAVCC)
, mDecoderInitializing(false)
, mLastError(NS_OK)
{
CreateDecoder();
@ -38,13 +39,15 @@ H264Converter::~H264Converter()
{
}
nsresult
nsRefPtr<MediaDataDecoder::InitPromise>
H264Converter::Init()
{
if (mDecoder) {
return mDecoder->Init();
}
return mLastError;
return MediaDataDecoder::InitPromise::CreateAndReject(
MediaDataDecoder::DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult
@ -59,6 +62,12 @@ H264Converter::Input(MediaRawData* aSample)
return NS_ERROR_FAILURE;
}
}
if (mDecoderInitializing) {
mMediaRawSamples.AppendElement(aSample);
return NS_OK;
}
nsresult rv;
if (!mDecoder) {
// It is not possible to create an AVCC H264 decoder without SPS.
@ -104,6 +113,7 @@ H264Converter::Shutdown()
{
if (mDecoder) {
nsresult rv = mDecoder->Shutdown();
mInitPromiseRequest.DisconnectIfExists();
mDecoder = nullptr;
return rv;
}
@ -151,8 +161,40 @@ H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
UpdateConfigFromExtraData(extra_data);
nsresult rv = CreateDecoder();
NS_ENSURE_SUCCESS(rv, rv);
return Init();
if (NS_SUCCEEDED(rv)) {
mDecoderInitializing = true;
nsRefPtr<H264Converter> self = this;
// mVideoTaskQueue is a flushable task queue, which can't be used as a
// MozPromise target, so we use the current AbstractThread instead. The
// MOZ_ASSERT above ensures we are running on an AbstractThread, so
// GetCurrent() won't return nullptr.
mInitPromiseRequest.Begin(mDecoder->Init()
->Then(AbstractThread::GetCurrent(), __func__, this,
&H264Converter::OnDecoderInitDone,
&H264Converter::OnDecoderInitFailed));
}
return rv;
}
void
H264Converter::OnDecoderInitDone(const TrackType aTrackType)
{
mInitPromiseRequest.Complete();
for (uint32_t i = 0 ; i < mMediaRawSamples.Length(); i++) {
if (NS_FAILED(mDecoder->Input(mMediaRawSamples[i]))) {
mCallback->Error();
}
}
mMediaRawSamples.Clear();
mDecoderInitializing = false;
}
void
H264Converter::OnDecoderInitFailed(MediaDataDecoder::DecoderFailureReason aReason)
{
mInitPromiseRequest.Complete();
mCallback->Error();
}
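
H264Converter can now receive Input() while the underlying decoder's Init() promise is still pending, so it buffers the samples in mMediaRawSamples and feeds them to the decoder from OnDecoderInitDone(). A stripped-down hedged sketch of that buffering idiom (DemoGate is hypothetical, assumes a Gecko build; error reporting is omitted):

// Hedged sketch of the "buffer input while Init() is pending" idiom above;
// DemoGate is hypothetical.
#include "PlatformDecoderModule.h"
#include "MediaData.h"
#include "mozilla/MozPromise.h"

namespace mozilla {

class DemoGate {
public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DemoGate)

  explicit DemoGate(MediaDataDecoder* aDecoder) : mDecoder(aDecoder) {}

  void Start()
  {
    mInitializing = true;
    nsRefPtr<DemoGate> self = this;
    mInitRequest.Begin(mDecoder->Init()->Then(AbstractThread::GetCurrent(), __func__,
      [self] (TrackInfo::TrackType) {
        self->mInitRequest.Complete();
        // Drain everything queued while initialization was in flight.
        for (uint32_t i = 0; i < self->mPending.Length(); i++) {
          self->mDecoder->Input(self->mPending[i]);  // error handling omitted
        }
        self->mPending.Clear();
        self->mInitializing = false;
      },
      [self] (MediaDataDecoder::DecoderFailureReason) {
        self->mInitRequest.Complete();
        // Report the error to the owner (omitted).
      }));
  }

  nsresult Input(MediaRawData* aSample)
  {
    if (mInitializing) {
      mPending.AppendElement(aSample);  // hold until Init() resolves
      return NS_OK;
    }
    return mDecoder->Input(aSample);
  }

private:
  ~DemoGate() { mInitRequest.DisconnectIfExists(); }

  nsRefPtr<MediaDataDecoder> mDecoder;
  nsTArray<nsRefPtr<MediaRawData>> mPending;
  MozPromiseRequestHolder<MediaDataDecoder::InitPromise> mInitRequest;
  bool mInitializing = false;
};

} // namespace mozilla
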
nsresult

View File

@ -29,7 +29,7 @@ public:
MediaDataDecoderCallback* aCallback);
virtual ~H264Converter();
virtual nsresult Init() override;
virtual nsRefPtr<InitPromise> Init() override;
virtual nsresult Input(MediaRawData* aSample) override;
virtual nsresult Flush() override;
virtual nsresult Drain() override;
@ -48,14 +48,20 @@ private:
nsresult CheckForSPSChange(MediaRawData* aSample);
void UpdateConfigFromExtraData(MediaByteBuffer* aExtraData);
void OnDecoderInitDone(const TrackType aTrackType);
void OnDecoderInitFailed(MediaDataDecoder::DecoderFailureReason aReason);
nsRefPtr<PlatformDecoderModule> mPDM;
VideoInfo mCurrentConfig;
layers::LayersBackend mLayersBackend;
nsRefPtr<layers::ImageContainer> mImageContainer;
nsRefPtr<FlushableTaskQueue> mVideoTaskQueue;
nsTArray<nsRefPtr<MediaRawData>> mMediaRawSamples;
MediaDataDecoderCallback* mCallback;
nsRefPtr<MediaDataDecoder> mDecoder;
MozPromiseRequestHolder<InitPromise> mInitPromiseRequest;
bool mNeedAVCC;
bool mDecoderInitializing;
nsresult mLastError;
};

View File

@ -47,7 +47,7 @@ ogg_packet InitOggPacket(const unsigned char* aData, size_t aLength,
class VorbisDecoder : public WebMAudioDecoder
{
public:
nsresult Init();
nsRefPtr<InitPromise> Init() override;
void Shutdown();
nsresult ResetDecode();
nsresult DecodeHeader(const unsigned char* aData, size_t aLength);
@ -94,14 +94,14 @@ VorbisDecoder::Shutdown()
mReader = nullptr;
}
nsresult
nsRefPtr<InitPromise>
VorbisDecoder::Init()
{
vorbis_info_init(&mVorbisInfo);
vorbis_comment_init(&mVorbisComment);
PodZero(&mVorbisDsp);
PodZero(&mVorbisBlock);
return NS_OK;
return InitPromise::CreateAndResolve(TrackType::kAudioTrack, __func__);
}
nsresult
@ -229,7 +229,7 @@ VorbisDecoder::Decode(const unsigned char* aData, size_t aLength,
class OpusDecoder : public WebMAudioDecoder
{
public:
nsresult Init();
nsRefPtr<InitPromise> Init() override;
void Shutdown();
nsresult ResetDecode();
nsresult DecodeHeader(const unsigned char* aData, size_t aLength);
@ -277,10 +277,10 @@ OpusDecoder::Shutdown()
mReader = nullptr;
}
nsresult
nsRefPtr<InitPromise>
OpusDecoder::Init()
{
return NS_OK;
return InitPromise::CreateAndResolve(TrackType::kAudioTrack, __func__);
}
nsresult

View File

@ -106,12 +106,12 @@ IntelWebMVideoDecoder::IsSupportedVideoMimeType(const nsACString& aMimeType)
mPlatform->SupportsMimeType(aMimeType);
}
nsresult
nsRefPtr<InitPromise>
IntelWebMVideoDecoder::Init(unsigned int aWidth, unsigned int aHeight)
{
mPlatform = PlatformDecoderModule::Create();
if (!mPlatform) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
mDecoderConfig = new VideoInfo();
@ -127,12 +127,12 @@ IntelWebMVideoDecoder::Init(unsigned int aWidth, unsigned int aHeight)
mDecoderConfig->mMimeType = "video/webm; codecs=vp9";
break;
default:
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
const VideoInfo& video = *mDecoderConfig;
if (!IsSupportedVideoMimeType(video.mMimeType)) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
mMediaDataDecoder =
mPlatform->CreateDecoder(video,
@ -141,11 +141,10 @@ IntelWebMVideoDecoder::Init(unsigned int aWidth, unsigned int aHeight)
mReader->GetLayersBackendType(),
mReader->GetDecoder()->GetImageContainer());
if (!mMediaDataDecoder) {
return NS_ERROR_FAILURE;
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult rv = mMediaDataDecoder->Init();
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
return mMediaDataDecoder->Init();
}
bool

View File

@ -28,7 +28,8 @@ class IntelWebMVideoDecoder : public WebMVideoDecoder, public MediaDataDecoderCa
{
public:
static WebMVideoDecoder* Create(WebMReader* aReader);
virtual nsresult Init(unsigned int aWidth, unsigned int aHeight) override;
virtual nsRefPtr<InitPromise> Init(unsigned int aWidth = 0,
unsigned int aHeight = 0) override;
virtual nsresult Flush() override;
virtual void Shutdown() override;

View File

@ -53,8 +53,20 @@ SoftwareWebMVideoDecoder::Create(WebMReader* aReader)
return new SoftwareWebMVideoDecoder(aReader);
}
nsresult
nsRefPtr<InitPromise>
SoftwareWebMVideoDecoder::Init(unsigned int aWidth, unsigned int aHeight)
{
nsresult rv = InitDecoder(aWidth, aHeight);
if (NS_SUCCEEDED(rv)) {
return InitPromise::CreateAndResolve(TrackType::kVideoTrack, __func__);
}
return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
}
nsresult
SoftwareWebMVideoDecoder::InitDecoder(unsigned int aWidth, unsigned int aHeight)
{
vpx_codec_iface_t* dx = nullptr;
switch(mReader->GetVideoCodec()) {

View File

@ -17,7 +17,8 @@ class SoftwareWebMVideoDecoder : public WebMVideoDecoder
public:
static WebMVideoDecoder* Create(WebMReader* aReader);
virtual nsresult Init(unsigned int aWidth, unsigned int aHeight) override;
virtual nsRefPtr<InitPromise> Init(unsigned int aWidth = 0,
unsigned int aHeight = 0) override;
virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
int64_t aTimeThreshold) override;
@ -28,6 +29,7 @@ public:
~SoftwareWebMVideoDecoder();
private:
nsresult InitDecoder(unsigned int aWidth, unsigned int aHeight);
nsRefPtr<WebMReader> mReader;
// VPx decoder state

View File

@ -276,8 +276,22 @@ void WebMReader::Cleanup()
}
}
nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
nsRefPtr<MediaDecoderReader::MetadataPromise>
WebMReader::AsyncReadMetadata()
{
nsRefPtr<MetadataHolder> metadata = new MetadataHolder();
if (NS_FAILED(RetrieveWebMMetadata(&metadata->mInfo)) ||
!metadata->mInfo.HasValidMedia()) {
return MetadataPromise::CreateAndReject(ReadMetadataFailureReason::METADATA_ERROR,
__func__);
}
return MetadataPromise::CreateAndResolve(metadata, __func__);
}
nsresult
WebMReader::RetrieveWebMMetadata(MediaInfo* aInfo)
{
// We can't use OnTaskQueue() here because of the wacky initialization task
// queue that TrackBuffer uses. We should be able to fix this when we do
@ -330,20 +344,17 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
#if defined(MOZ_PDM_VPX)
if (sIsIntelDecoderEnabled) {
mVideoDecoder = IntelWebMVideoDecoder::Create(this);
if (mVideoDecoder &&
NS_FAILED(mVideoDecoder->Init(params.display_width, params.display_height))) {
mVideoDecoder = nullptr;
}
}
#endif
// If there's no decoder yet (e.g. HW decoder not available), use the software decoder.
if (!mVideoDecoder) {
mVideoDecoder = SoftwareWebMVideoDecoder::Create(this);
if (mVideoDecoder &&
NS_FAILED(mVideoDecoder->Init(params.display_width, params.display_height))) {
mVideoDecoder = nullptr;
}
}
if (mVideoDecoder) {
mInitPromises.AppendElement(mVideoDecoder->Init(params.display_width,
params.display_height));
}
if (!mVideoDecoder) {
@ -426,7 +437,10 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
Cleanup();
return NS_ERROR_FAILURE;
}
if (NS_FAILED(mAudioDecoder->Init())) {
if (mAudioDecoder) {
mInitPromises.AppendElement(mAudioDecoder->Init());
} else {
Cleanup();
return NS_ERROR_FAILURE;
}
@ -460,8 +474,6 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
*aInfo = mInfo;
*aTags = nullptr;
return NS_OK;
}
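
RetrieveWebMMetadata() now only collects each decoder's InitPromise into mInitPromises instead of blocking on Init(); the hunks here do not show where those promises are consumed. Presumably they are aggregated much like MediaFormatReader does, but the sketch below is purely an assumption modeled on EnsureDecodersInitialized(), with a hypothetical helper, and is not shown by this patch.

// Hedged sketch only: the wait site for mInitPromises is not part of this
// diff; this aggregation is an assumption (hypothetical helper).
#include "PlatformDecoderModule.h"
#include "mozilla/MozPromise.h"
#include "mozilla/AbstractThread.h"  // include path assumed

static void
WaitForWebMDecoders(nsTArray<nsRefPtr<mozilla::MediaDataDecoder::InitPromise>>& aPromises,
                    mozilla::AbstractThread* aThread)
{
  typedef mozilla::MediaDataDecoder::InitPromise InitPromise;

  InitPromise::All(aThread, aPromises)
    ->Then(aThread, __func__,
      [](const nsTArray<mozilla::TrackInfo::TrackType>&) {
        // Every audio/video decoder is ready.
      },
      [](mozilla::MediaDataDecoder::DecoderFailureReason) {
        // Initialization failed; surface a decode error.
      });
}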

View File

@ -9,6 +9,7 @@
#include <stdint.h>
#include "MediaDecoderReader.h"
#include "PlatformDecoderModule.h"
#include "nsAutoRef.h"
#include "nestegg/nestegg.h"
@ -23,6 +24,10 @@ namespace mozilla {
static const unsigned NS_PER_USEC = 1000;
static const double NS_PER_S = 1e9;
typedef MediaDataDecoder::InitPromise InitPromise;
typedef TrackInfo::TrackType TrackType;
typedef MediaDataDecoder::DecoderFailureReason DecoderFailureReason;
class WebMBufferedState;
class WebMPacketQueue;
@ -32,7 +37,7 @@ class WebMReader;
class WebMVideoDecoder
{
public:
virtual nsresult Init(unsigned int aWidth = 0, unsigned int aHeight = 0) = 0;
virtual nsRefPtr<InitPromise> Init(unsigned int aWidth = 0, unsigned int aHeight = 0) = 0;
virtual nsresult Flush() { return NS_OK; }
virtual void Shutdown() = 0;
virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
@ -45,7 +50,7 @@ public:
class WebMAudioDecoder
{
public:
virtual nsresult Init() = 0;
virtual nsRefPtr<InitPromise> Init() = 0;
virtual void Shutdown() = 0;
virtual nsresult ResetDecode() = 0;
virtual nsresult DecodeHeader(const unsigned char* aData, size_t aLength) = 0;
@ -85,8 +90,8 @@ public:
return mHasVideo;
}
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) override;
virtual nsRefPtr<MetadataPromise> AsyncReadMetadata() override;
virtual nsRefPtr<SeekPromise>
Seek(int64_t aTime, int64_t aEndTime) override;
@ -120,7 +125,6 @@ public:
uint64_t GetCodecDelay() { return mCodecDelay; }
protected:
virtual void NotifyDataArrivedInternal(uint32_t aLength, int64_t aOffset) override;
// Decode a nestegg packet of audio data. Push the audio data on the
@ -143,6 +147,8 @@ protected:
bool ShouldSkipVideoFrame(int64_t aTimeThreshold);
private:
nsresult RetrieveWebMMetadata(MediaInfo* aInfo);
// Get the timestamp of the next keyframe greater than aTimeThreshold.
int64_t GetNextKeyframeTime(int64_t aTimeThreshold);
// Push the packets whose timestamp is less than aEndTime into aOutput.
@ -160,6 +166,8 @@ private:
nsAutoPtr<WebMAudioDecoder> mAudioDecoder;
nsAutoPtr<WebMVideoDecoder> mVideoDecoder;
nsTArray<nsRefPtr<InitPromise>> mInitPromises;
// Queue of video and audio packets that have been read but not decoded. These
// must only be accessed from the decode thread.
WebMPacketQueue mVideoPackets;

View File

@ -141,4 +141,11 @@ AbstractThread::DispatchDirectTask(already_AddRefed<nsIRunnable> aRunnable)
GetCurrent()->TailDispatcher().AddDirectTask(Move(aRunnable));
}
already_AddRefed<AbstractThread>
CreateXPCOMAbstractThreadWrapper(nsIThread* aThread, bool aRequireTailDispatch)
{
nsRefPtr<XPCOMThreadWrapper> wrapper = new XPCOMThreadWrapper(aThread, aRequireTailDispatch);
return wrapper.forget();
}
} // namespace mozilla

View File

@ -92,6 +92,9 @@ protected:
const bool mSupportsTailDispatch;
};
already_AddRefed<AbstractThread> CreateXPCOMAbstractThreadWrapper(nsIThread* aThread,
bool aRequireTailDispatch);
} // namespace mozilla
#endif
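
The new CreateXPCOMAbstractThreadWrapper() helper is what lets MediaDataDecoderProxy use its raw nsIThread as a MozPromise target. A hedged usage sketch, assuming a Gecko build; DemoPromise and DemoWrapAndThen are hypothetical.

// Hedged sketch: wrap an nsIThread so it can serve as a MozPromise Then()
// target, as MediaDataDecoderProxy's constructor now does.
#include "mozilla/AbstractThread.h"  // include path assumed
#include "mozilla/MozPromise.h"
#include "nsIThread.h"

typedef mozilla::MozPromise<bool, nsresult, /* IsExclusive = */ true> DemoPromise;

static void
DemoWrapAndThen(nsIThread* aThread, DemoPromise* aPromise)
{
  nsRefPtr<mozilla::AbstractThread> wrapper =
    mozilla::CreateXPCOMAbstractThreadWrapper(aThread,
                                              /* aRequireTailDispatch = */ false);
  aPromise->Then(wrapper, __func__,
    [] (bool) { /* resolve handler runs on aThread */ },
    [] (nsresult) { /* reject handler runs on aThread */ });
}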