Bug 1178938 - Make MediaDecoderReader::DecodeToFirstVideoData async. r=jww

Bobby Holley 2015-06-30 01:03:08 -07:00
parent 7826ffd1e1
commit 73525cceb7
6 changed files with 68 additions and 24 deletions
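
Not part of the patch itself: a minimal sketch of how a reader-side caller is expected to consume the now-asynchronous DecodeToFirstVideoData(), modelled on the Seek() changes below. ExampleReader is an illustrative name for a class assumed to derive from MediaDecoderReader and to declare mSeekPromise/mSeekRequest holders like the readers in this commit; the promise and holder types are the ones the patch uses.

void ExampleReader::SeekToTarget(int64_t aTarget)
{
  MOZ_ASSERT(OnTaskQueue());
  nsRefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);
  nsRefPtr<ExampleReader> self = this;
  // DecodeToFirstVideoData() now returns a VideoDataPromise rather than a raw
  // VideoData*, so the caller chains on it and tracks the outstanding request.
  mSeekRequest.Begin(DecodeToFirstVideoData()->Then(TaskQueue(), __func__,
    [self] (VideoData* v) {
      // Resolved: a first frame was decoded; align the audio seek time to it.
      self->mSeekRequest.Complete();
      self->mSeekPromise.Resolve(v->mTime, __func__);
    },
    [self, aTarget] () {
      // Rejected (EOS, error, or shutdown); fall back to the requested target.
      self->mSeekRequest.Complete();
      self->mSeekPromise.Resolve(aTarget, __func__);
    }));
}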

MediaDecoderReader.cpp

@@ -147,24 +147,33 @@ nsresult MediaDecoderReader::ResetDecode()
   return NS_OK;
 }
 
-VideoData* MediaDecoderReader::DecodeToFirstVideoData()
+nsRefPtr<MediaDecoderReader::VideoDataPromise>
+MediaDecoderReader::DecodeToFirstVideoData()
 {
-  bool eof = false;
-  while (!eof && VideoQueue().GetSize() == 0) {
-    {
-      ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
-      if (mDecoder->IsShutdown()) {
-        return nullptr;
-      }
-    }
-    bool keyframeSkip = false;
-    eof = !DecodeVideoFrame(keyframeSkip, 0);
-  }
-  if (eof) {
-    VideoQueue().Finish();
-  }
-  VideoData* d = nullptr;
-  return (d = VideoQueue().PeekFront()) ? d : nullptr;
+  MOZ_ASSERT(OnTaskQueue());
+  typedef MediaDecoderReader::VideoDataPromise PromiseType;
+  nsRefPtr<PromiseType::Private> p = new PromiseType::Private(__func__);
+  nsRefPtr<MediaDecoderReader> self = this;
+  InvokeUntil([self] () -> bool {
+    MOZ_ASSERT(self->OnTaskQueue());
+    NS_ENSURE_TRUE(!self->mShutdown, false);
+    bool skip = false;
+    if (!self->DecodeVideoFrame(skip, 0)) {
+      self->VideoQueue().Finish();
+      return !!self->VideoQueue().PeekFront();
+    }
+    return true;
+  }, [self] () -> bool {
+    MOZ_ASSERT(self->OnTaskQueue());
+    return self->VideoQueue().GetSize();
+  })->Then(TaskQueue(), __func__, [self, p] () {
+    p->Resolve(self->VideoQueue().PeekFront(), __func__);
+  }, [p] () {
+    // We don't have a way to differentiate EOS, error, and shutdown here. :-(
+    p->Reject(END_OF_STREAM, __func__);
+  });
+
+  return p.forget();
 }
 
 void

MediaDecoderReader.h

@@ -288,8 +288,7 @@ public:
     return mDecoder;
   }
 
-  // TODO: DEPRECATED. This uses synchronous decoding.
-  VideoData* DecodeToFirstVideoData();
+  nsRefPtr<VideoDataPromise> DecodeToFirstVideoData();
 
   MediaInfo GetMediaInfo() { return mInfo; }

AndroidMediaReader.cpp

@@ -116,6 +116,8 @@ nsresult AndroidMediaReader::ResetDecode()
   if (mLastVideoFrame) {
     mLastVideoFrame = nullptr;
   }
+  mSeekRequest.DisconnectIfExists();
+  mSeekPromise.RejectIfExists(NS_OK, __func__);
   return MediaDecoderReader::ResetDecode();
 }
 
@@ -323,6 +325,7 @@ AndroidMediaReader::Seek(int64_t aTarget, int64_t aEndTime)
 {
   MOZ_ASSERT(OnTaskQueue());
+  nsRefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);
 
   if (mHasAudio && mHasVideo) {
     // The decoder seeks/demuxes audio and video streams separately. So if
     // we seek both audio and video to aTarget, the audio stream can typically
@@ -333,13 +336,23 @@ AndroidMediaReader::Seek(int64_t aTarget, int64_t aEndTime)
     // seek the audio stream to match the video stream's time. Otherwise, the
     // audio and video streams won't be in sync after the seek.
     mVideoSeekTimeUs = aTarget;
-    const VideoData* v = DecodeToFirstVideoData();
-    mAudioSeekTimeUs = v ? v->mTime : aTarget;
+    nsRefPtr<AndroidMediaReader> self = this;
+    mSeekRequest.Begin(DecodeToFirstVideoData()->Then(TaskQueue(), __func__, [self] (VideoData* v) {
+      self->mSeekRequest.Complete();
+      self->mAudioSeekTimeUs = v->mTime;
+      self->mSeekPromise.Resolve(self->mAudioSeekTimeUs, __func__);
+    }, [self, aTarget] () {
+      self->mSeekRequest.Complete();
+      self->mAudioSeekTimeUs = aTarget;
+      self->mSeekPromise.Resolve(aTarget, __func__);
+    }));
   } else {
     mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget;
+    mSeekPromise.Resolve(aTarget, __func__);
   }
 
-  return SeekPromise::CreateAndResolve(mAudioSeekTimeUs, __func__);
+  return p;
 }
 
 AndroidMediaReader::ImageBufferCallback::ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer) :

AndroidMediaReader.h

@@ -36,6 +36,8 @@ class AndroidMediaReader : public MediaDecoderReader
   int64_t mVideoSeekTimeUs;
   int64_t mAudioSeekTimeUs;
   nsRefPtr<VideoData> mLastVideoFrame;
+  MediaPromiseHolder<MediaDecoderReader::SeekPromise> mSeekPromise;
+  MediaPromiseRequestHolder<MediaDecoderReader::VideoDataPromise> mSeekRequest;
 public:
   AndroidMediaReader(AbstractMediaDecoder* aDecoder,
                      const nsACString& aContentType);

MediaOmxReader.cpp

@@ -527,6 +527,7 @@ MediaOmxReader::Seek(int64_t aTarget, int64_t aEndTime)
 {
   MOZ_ASSERT(OnTaskQueue());
   EnsureActive();
+  nsRefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);
 
   VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
   if (container && container->GetImageContainer()) {
@@ -543,13 +544,23 @@ MediaOmxReader::Seek(int64_t aTarget, int64_t aEndTime)
     // seek the audio stream to match the video stream's time. Otherwise, the
     // audio and video streams won't be in sync after the seek.
     mVideoSeekTimeUs = aTarget;
-    const VideoData* v = DecodeToFirstVideoData();
-    mAudioSeekTimeUs = v ? v->mTime : aTarget;
+    nsRefPtr<MediaOmxReader> self = this;
+    mSeekRequest.Begin(DecodeToFirstVideoData()->Then(TaskQueue(), __func__, [self] (VideoData* v) {
+      self->mSeekRequest.Complete();
+      self->mAudioSeekTimeUs = v->mTime;
+      self->mSeekPromise.Resolve(self->mAudioSeekTimeUs, __func__);
+    }, [self, aTarget] () {
+      self->mSeekRequest.Complete();
+      self->mAudioSeekTimeUs = aTarget;
+      self->mSeekPromise.Resolve(aTarget, __func__);
+    }));
   } else {
     mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget;
+    mSeekPromise.Resolve(aTarget, __func__);
   }
 
-  return SeekPromise::CreateAndResolve(mAudioSeekTimeUs, __func__);
+  return p;
 }
 
 void MediaOmxReader::SetIdle() {

MediaOmxReader.h

@@ -46,6 +46,9 @@ class MediaOmxReader : public MediaOmxCommonReader
   bool mIsShutdown;
   MediaPromiseHolder<MediaDecoderReader::MetadataPromise> mMetadataPromise;
   MediaPromiseRequestHolder<MediaResourcePromise> mMediaResourceRequest;
+  MediaPromiseHolder<MediaDecoderReader::SeekPromise> mSeekPromise;
+  MediaPromiseRequestHolder<MediaDecoderReader::VideoDataPromise> mSeekRequest;
+
 protected:
   android::sp<android::OmxDecoder> mOmxDecoder;
   android::sp<android::MediaExtractor> mExtractor;
@@ -73,6 +76,13 @@ protected:
   virtual void NotifyDataArrivedInternal(uint32_t aLength, int64_t aOffset) override;
 
 public:
+  virtual nsresult ResetDecode()
+  {
+    mSeekRequest.DisconnectIfExists();
+    mSeekPromise.RejectIfExists(NS_OK, __func__);
+    return MediaDecoderReader::ResetDecode();
+  }
+
   virtual bool DecodeAudioData();
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                 int64_t aTimeThreshold);