Bug 1033912 - Separate MediaTaskQueues for the video/audio decoding. r=cpearce, r=brsun

This commit is contained in:
Benjamin Chen 2014-08-20 18:17:38 +08:00
parent 8e5b9fd3aa
commit 95d4e975ba
2 changed files with 355 additions and 172 deletions

View File

@ -28,7 +28,10 @@
#include "gfx2DGlue.h" #include "gfx2DGlue.h"
#include "MediaStreamSource.h" #include "MediaStreamSource.h"
#include "MediaTaskQueue.h"
#include "nsThreadUtils.h"
#include "ImageContainer.h" #include "ImageContainer.h"
#include "SharedThreadPool.h"
#include "VideoFrameContainer.h" #include "VideoFrameContainer.h"
using namespace android; using namespace android;
@ -125,8 +128,11 @@ MediaCodecReader::Track::Track()
: mDurationUs(INT64_C(0)) : mDurationUs(INT64_C(0))
, mInputIndex(sInvalidInputIndex) , mInputIndex(sInvalidInputIndex)
, mInputEndOfStream(false) , mInputEndOfStream(false)
, mOutputEndOfStream(false)
, mSeekTimeUs(sInvalidTimestampUs) , mSeekTimeUs(sInvalidTimestampUs)
, mFlushed(false) , mFlushed(false)
, mDiscontinuity(false)
, mTaskQueue(nullptr)
{ {
} }
@ -220,12 +226,61 @@ MediaCodecReader::Shutdown()
ReleaseResources(); ReleaseResources();
} }
bool void
MediaCodecReader::DecodeAudioData() MediaCodecReader::DispatchAudioTask()
{ {
MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread."); if (mAudioTrack.mTaskQueue && mAudioTrack.mTaskQueue->IsEmpty()) {
RefPtr<nsIRunnable> task =
NS_NewRunnableMethod(this,
&MediaCodecReader::DecodeAudioDataTask);
mAudioTrack.mTaskQueue->Dispatch(task);
}
}
if (mAudioTrack.mCodec == nullptr || !mAudioTrack.mCodec->allocated()) { void
MediaCodecReader::DispatchVideoTask(int64_t aTimeThreshold)
{
if (mVideoTrack.mTaskQueue && mVideoTrack.mTaskQueue->IsEmpty()) {
RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<int64_t>(this,
&MediaCodecReader::DecodeVideoFrameTask,
aTimeThreshold);
mVideoTrack.mTaskQueue->Dispatch(task);
}
}
void
MediaCodecReader::RequestAudioData()
{
MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
MOZ_ASSERT(HasAudio());
if (CheckAudioResources()) {
DispatchAudioTask();
}
}
void
MediaCodecReader::RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold)
{
MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
MOZ_ASSERT(HasVideo());
int64_t threshold = sInvalidTimestampUs;
if (aSkipToNextKeyframe && IsValidTimestampUs(aTimeThreshold)) {
mVideoTrack.mTaskQueue->Flush();
threshold = aTimeThreshold;
}
if (CheckVideoResources()) {
DispatchVideoTask(threshold);
}
}
bool
MediaCodecReader::DecodeAudioDataSync()
{
if (mAudioTrack.mCodec == nullptr || !mAudioTrack.mCodec->allocated() ||
mAudioTrack.mOutputEndOfStream) {
return false; return false;
} }
@ -234,14 +289,22 @@ MediaCodecReader::DecodeAudioData()
status_t status; status_t status;
TimeStamp timeout = TimeStamp::Now() + TimeDuration::FromSeconds(sMaxAudioDecodeDurationS); TimeStamp timeout = TimeStamp::Now() + TimeDuration::FromSeconds(sMaxAudioDecodeDurationS);
while (true) { while (true) {
if (timeout < TimeStamp::Now()) { // Try to fill more input buffers and then get one output buffer.
return true; // Try it again later. // FIXME: use callback from MediaCodec
} FillCodecInputData(mAudioTrack);
status = GetCodecOutputData(mAudioTrack, bufferInfo, sInvalidTimestampUs, timeout); status = GetCodecOutputData(mAudioTrack, bufferInfo, sInvalidTimestampUs, timeout);
if (status == OK || status == ERROR_END_OF_STREAM) { if (status == OK || status == ERROR_END_OF_STREAM) {
break; break;
} else if (status == -EAGAIN) { } else if (status == -EAGAIN) {
return true; // Try it again later. if (TimeStamp::Now() > timeout) {
// Don't let this loop run for too long. Try it again later.
if (CheckAudioResources()) {
DispatchAudioTask();
}
return true;
}
continue; // Try it again now.
} else if (status == INFO_FORMAT_CHANGED) { } else if (status == INFO_FORMAT_CHANGED) {
if (UpdateAudioInfo()) { if (UpdateAudioInfo()) {
continue; // Try it again now. continue; // Try it again now.
@ -253,8 +316,7 @@ MediaCodecReader::DecodeAudioData()
} }
} }
bool result = true; bool result = false;
if (bufferInfo.mBuffer != nullptr && bufferInfo.mSize > 0 && bufferInfo.mBuffer->data() != nullptr) { if (bufferInfo.mBuffer != nullptr && bufferInfo.mSize > 0 && bufferInfo.mBuffer->data() != nullptr) {
// This is the approximate byte position in the stream. // This is the approximate byte position in the stream.
int64_t pos = mDecoder->GetResource()->Tell(); int64_t pos = mDecoder->GetResource()->Tell();
@ -273,137 +335,52 @@ MediaCodecReader::DecodeAudioData()
mInfo.mAudio.mChannels)); mInfo.mAudio.mChannels));
} }
mAudioTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex); if ((bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_EOS) ||
(status == ERROR_END_OF_STREAM)) {
if (status == ERROR_END_OF_STREAM) { AudioQueue().Finish();
return false;
} }
mAudioTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
return result; return result;
} }
bool bool
MediaCodecReader::DecodeVideoFrame(bool &aKeyframeSkip, int64_t aTimeThreshold) MediaCodecReader::DecodeAudioDataTask()
{ {
MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread."); bool result = DecodeAudioDataSync();
if (AudioQueue().GetSize() > 0) {
if (mVideoTrack.mCodec == nullptr || !mVideoTrack.mCodec->allocated()) { AudioData* a = AudioQueue().PopFront();
return false; if (a) {
} if (mAudioTrack.mDiscontinuity) {
a->mDiscontinuity = true;
int64_t threshold = sInvalidTimestampUs; mAudioTrack.mDiscontinuity = false;
if (aKeyframeSkip && IsValidTimestampUs(aTimeThreshold)) {
threshold = aTimeThreshold;
}
// Get one video output data from MediaCodec
CodecBufferInfo bufferInfo;
status_t status;
TimeStamp timeout = TimeStamp::Now() + TimeDuration::FromSeconds(sMaxVideoDecodeDurationS);
while (true) {
if (timeout < TimeStamp::Now()) {
return true; // Try it again later.
}
status = GetCodecOutputData(mVideoTrack, bufferInfo, threshold, timeout);
if (status == OK || status == ERROR_END_OF_STREAM) {
break;
} else if (status == -EAGAIN) {
return true; // Try it again later.
} else if (status == INFO_FORMAT_CHANGED) {
if (UpdateVideoInfo()) {
continue; // Try it again now.
} else {
return false;
} }
} else { GetCallback()->OnAudioDecoded(a);
return false;
} }
} }
if (AudioQueue().AtEndOfStream()) {
GetCallback()->OnAudioEOS();
}
return result;
}
bool result = true; bool
MediaCodecReader::DecodeVideoFrameTask(int64_t aTimeThreshold)
if (bufferInfo.mBuffer != nullptr && bufferInfo.mSize > 0 && bufferInfo.mBuffer->data() != nullptr) { {
uint8_t *yuv420p_buffer = bufferInfo.mBuffer->data(); bool result = DecodeVideoFrameSync(aTimeThreshold);
int32_t stride = mVideoTrack.mStride; if (VideoQueue().GetSize() > 0) {
int32_t slice_height = mVideoTrack.mSliceHeight; VideoData* v = VideoQueue().PopFront();
if (v) {
// Converts to OMX_COLOR_FormatYUV420Planar if (mVideoTrack.mDiscontinuity) {
if (mVideoTrack.mColorFormat != OMX_COLOR_FormatYUV420Planar) { v->mDiscontinuity = true;
ARect crop; mVideoTrack.mDiscontinuity = false;
crop.top = 0;
crop.bottom = mVideoTrack.mHeight;
crop.left = 0;
crop.right = mVideoTrack.mWidth;
yuv420p_buffer = GetColorConverterBuffer(mVideoTrack.mWidth, mVideoTrack.mHeight);
if (mColorConverter.convertDecoderOutputToI420(
bufferInfo.mBuffer->data(), mVideoTrack.mWidth, mVideoTrack.mHeight, crop, yuv420p_buffer) != OK) {
mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
NS_WARNING("Unable to convert color format");
return false;
} }
GetCallback()->OnVideoDecoded(v);
stride = mVideoTrack.mWidth;
slice_height = mVideoTrack.mHeight;
}
size_t yuv420p_y_size = stride * slice_height;
size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
uint8_t *yuv420p_y = yuv420p_buffer;
uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;
// This is the approximate byte position in the stream.
int64_t pos = mDecoder->GetResource()->Tell();
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = yuv420p_y;
b.mPlanes[0].mWidth = mVideoTrack.mWidth;
b.mPlanes[0].mHeight = mVideoTrack.mHeight;
b.mPlanes[0].mStride = stride;
b.mPlanes[0].mOffset = 0;
b.mPlanes[0].mSkip = 0;
b.mPlanes[1].mData = yuv420p_u;
b.mPlanes[1].mWidth = (mVideoTrack.mWidth + 1) / 2;
b.mPlanes[1].mHeight = (mVideoTrack.mHeight + 1) / 2;
b.mPlanes[1].mStride = (stride + 1) / 2;
b.mPlanes[1].mOffset = 0;
b.mPlanes[1].mSkip = 0;
b.mPlanes[2].mData = yuv420p_v;
b.mPlanes[2].mWidth =(mVideoTrack.mWidth + 1) / 2;
b.mPlanes[2].mHeight = (mVideoTrack.mHeight + 1) / 2;
b.mPlanes[2].mStride = (stride + 1) / 2;
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 0;
VideoData *v = VideoData::Create(
mInfo.mVideo,
mDecoder->GetImageContainer(),
pos,
bufferInfo.mTimeUs,
1, // We don't know the duration.
b,
bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_SYNCFRAME,
-1,
mVideoTrack.mRelativePictureRect);
if (v != nullptr) {
result = true;
mVideoQueue.Push(v);
aKeyframeSkip = false;
} else {
NS_WARNING("Unable to create VideoData");
} }
} }
if (VideoQueue().AtEndOfStream()) {
mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex); GetCallback()->OnVideoEOS();
if (status == ERROR_END_OF_STREAM) {
return false;
} }
return result; return result;
} }
@ -475,6 +452,148 @@ MediaCodecReader::ReadMetadata(MediaInfo* aInfo,
return NS_OK; return NS_OK;
} }
nsresult
MediaCodecReader::ResetDecode()
{
if (CheckAudioResources()) {
mAudioTrack.mTaskQueue->Flush();
FlushCodecData(mAudioTrack);
mAudioTrack.mDiscontinuity = true;
}
if (CheckVideoResources()) {
mVideoTrack.mTaskQueue->Flush();
FlushCodecData(mVideoTrack);
mVideoTrack.mDiscontinuity = true;
}
return MediaDecoderReader::ResetDecode();
}
bool
MediaCodecReader::DecodeVideoFrameSync(int64_t aTimeThreshold)
{
if (mVideoTrack.mCodec == nullptr || !mVideoTrack.mCodec->allocated() ||
mVideoTrack.mOutputEndOfStream) {
return false;
}
// Get one video output data from MediaCodec
CodecBufferInfo bufferInfo;
status_t status;
TimeStamp timeout = TimeStamp::Now() + TimeDuration::FromSeconds(sMaxVideoDecodeDurationS);
while (true) {
// Try to fill more input buffers and then get one output buffer.
// FIXME: use callback from MediaCodec
FillCodecInputData(mVideoTrack);
status = GetCodecOutputData(mVideoTrack, bufferInfo, aTimeThreshold, timeout);
if (status == OK || status == ERROR_END_OF_STREAM) {
break;
} else if (status == -EAGAIN) {
if (TimeStamp::Now() > timeout) {
// Don't let this loop run for too long. Try it again later.
if (CheckVideoResources()) {
DispatchVideoTask(aTimeThreshold);
}
return true;
}
continue; // Try it again now.
} else if (status == INFO_FORMAT_CHANGED) {
if (UpdateVideoInfo()) {
continue; // Try it again now.
} else {
return false;
}
} else {
return false;
}
}
bool result = false;
if (bufferInfo.mBuffer != nullptr && bufferInfo.mSize > 0 && bufferInfo.mBuffer->data() != nullptr) {
uint8_t *yuv420p_buffer = bufferInfo.mBuffer->data();
int32_t stride = mVideoTrack.mStride;
int32_t slice_height = mVideoTrack.mSliceHeight;
// Converts to OMX_COLOR_FormatYUV420Planar
if (mVideoTrack.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
ARect crop;
crop.top = 0;
crop.bottom = mVideoTrack.mHeight;
crop.left = 0;
crop.right = mVideoTrack.mWidth;
yuv420p_buffer = GetColorConverterBuffer(mVideoTrack.mWidth, mVideoTrack.mHeight);
if (mColorConverter.convertDecoderOutputToI420(
bufferInfo.mBuffer->data(), mVideoTrack.mWidth, mVideoTrack.mHeight, crop, yuv420p_buffer) != OK) {
mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
NS_WARNING("Unable to convert color format");
return false;
}
stride = mVideoTrack.mWidth;
slice_height = mVideoTrack.mHeight;
}
size_t yuv420p_y_size = stride * slice_height;
size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
uint8_t *yuv420p_y = yuv420p_buffer;
uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;
// This is the approximate byte position in the stream.
int64_t pos = mDecoder->GetResource()->Tell();
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = yuv420p_y;
b.mPlanes[0].mWidth = mVideoTrack.mWidth;
b.mPlanes[0].mHeight = mVideoTrack.mHeight;
b.mPlanes[0].mStride = stride;
b.mPlanes[0].mOffset = 0;
b.mPlanes[0].mSkip = 0;
b.mPlanes[1].mData = yuv420p_u;
b.mPlanes[1].mWidth = (mVideoTrack.mWidth + 1) / 2;
b.mPlanes[1].mHeight = (mVideoTrack.mHeight + 1) / 2;
b.mPlanes[1].mStride = (stride + 1) / 2;
b.mPlanes[1].mOffset = 0;
b.mPlanes[1].mSkip = 0;
b.mPlanes[2].mData = yuv420p_v;
b.mPlanes[2].mWidth =(mVideoTrack.mWidth + 1) / 2;
b.mPlanes[2].mHeight = (mVideoTrack.mHeight + 1) / 2;
b.mPlanes[2].mStride = (stride + 1) / 2;
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 0;
VideoData *v = VideoData::Create(
mInfo.mVideo,
mDecoder->GetImageContainer(),
pos,
bufferInfo.mTimeUs,
1, // We don't know the duration.
b,
bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_SYNCFRAME,
-1,
mVideoTrack.mRelativePictureRect);
if (v != nullptr) {
result = true;
VideoQueue().Push(v);
} else {
NS_WARNING("Unable to create VideoData");
}
}
if ((bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_EOS) ||
(status == ERROR_END_OF_STREAM)) {
VideoQueue().Finish();
}
mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
return result;
}
nsresult nsresult
MediaCodecReader::Seek(int64_t aTime, MediaCodecReader::Seek(int64_t aTime,
int64_t aStartTime, int64_t aStartTime,
@ -483,43 +602,53 @@ MediaCodecReader::Seek(int64_t aTime,
{ {
MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread."); MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread.");
VideoFrameContainer* videoframe = mDecoder->GetVideoFrameContainer();
if (videoframe != nullptr) {
mozilla::layers::ImageContainer *image = videoframe->GetImageContainer();
if (image != nullptr) {
image->ClearAllImagesExceptFront();
}
}
mAudioTrack.mInputEndOfStream = false;
mVideoTrack.mInputEndOfStream = false;
mAudioTrack.mSeekTimeUs = aTime;
mVideoTrack.mSeekTimeUs = aTime; mVideoTrack.mSeekTimeUs = aTime;
mAudioTrack.mSeekTimeUs = aTime;
mVideoTrack.mInputEndOfStream = false;
mVideoTrack.mOutputEndOfStream = false;
mAudioTrack.mInputEndOfStream = false;
mAudioTrack.mOutputEndOfStream = false;
mAudioTrack.mFlushed = false; mAudioTrack.mFlushed = false;
mVideoTrack.mFlushed = false; mVideoTrack.mFlushed = false;
// Regulate the seek time to the closest sync point of video data. if (CheckVideoResources()) {
if (HasVideo() && mVideoTrack.mSource != nullptr) { VideoFrameContainer* videoframe = mDecoder->GetVideoFrameContainer();
MediaBuffer *source_buffer = nullptr; if (videoframe != nullptr) {
mozilla::layers::ImageContainer *image = videoframe->GetImageContainer();
if (image != nullptr) {
image->ClearAllImagesExceptFront();
}
}
MediaBuffer* source_buffer = nullptr;
MediaSource::ReadOptions options; MediaSource::ReadOptions options;
int64_t timestamp = sInvalidTimestampUs;
options.setSeekTo(aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); options.setSeekTo(aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
if (mVideoTrack.mSource->read(&source_buffer, &options) != OK || source_buffer == nullptr) { if (mVideoTrack.mSource->read(&source_buffer, &options) != OK ||
source_buffer == nullptr) {
return NS_ERROR_FAILURE; return NS_ERROR_FAILURE;
} }
sp<MetaData> format = source_buffer->meta_data(); sp<MetaData> format = source_buffer->meta_data();
if (format != nullptr) { if (format != nullptr) {
int64_t timestamp = sInvalidTimestampUs;
if (format->findInt64(kKeyTime, &timestamp) && IsValidTimestampUs(timestamp)) { if (format->findInt64(kKeyTime, &timestamp) && IsValidTimestampUs(timestamp)) {
mAudioTrack.mSeekTimeUs = timestamp;
mVideoTrack.mSeekTimeUs = timestamp; mVideoTrack.mSeekTimeUs = timestamp;
mAudioTrack.mSeekTimeUs = timestamp;
} }
format = nullptr; format = nullptr;
} }
source_buffer->release(); source_buffer->release();
}
MOZ_ASSERT(mVideoTrack.mTaskQueue->IsEmpty());
DispatchVideoTask(mVideoTrack.mSeekTimeUs);
if (CheckAudioResources()) {
MOZ_ASSERT(mAudioTrack.mTaskQueue->IsEmpty());
DispatchAudioTask();
}
} else if (CheckAudioResources()) {// Audio only
MOZ_ASSERT(mAudioTrack.mTaskQueue->IsEmpty());
DispatchAudioTask();
}
return NS_OK; return NS_OK;
} }
@ -542,7 +671,8 @@ MediaCodecReader::ReallocateResources()
if (CreateLooper() && if (CreateLooper() &&
CreateExtractor() && CreateExtractor() &&
CreateMediaSources() && CreateMediaSources() &&
CreateMediaCodecs()) { CreateMediaCodecs() &&
CreateTaskQueues()) {
return true; return true;
} }
@ -573,6 +703,7 @@ MediaCodecReader::ReleaseResources()
DestroyMediaSources(); DestroyMediaSources();
DestroyExtractor(); DestroyExtractor();
DestroyLooper(); DestroyLooper();
ShutdownTaskQueues();
} }
bool bool
@ -711,6 +842,38 @@ MediaCodecReader::DestroyMediaSources()
mAudioOffloadTrack.mSource = nullptr; mAudioOffloadTrack.mSource = nullptr;
} }
void
MediaCodecReader::ShutdownTaskQueues()
{
if(mAudioTrack.mTaskQueue) {
mAudioTrack.mTaskQueue->Shutdown();
mAudioTrack.mTaskQueue = nullptr;
}
if(mVideoTrack.mTaskQueue) {
mVideoTrack.mTaskQueue->Shutdown();
mVideoTrack.mTaskQueue = nullptr;
}
}
bool
MediaCodecReader::CreateTaskQueues()
{
if (mAudioTrack.mSource != nullptr && mAudioTrack.mCodec != nullptr &&
!mAudioTrack.mTaskQueue) {
mAudioTrack.mTaskQueue = new MediaTaskQueue(
SharedThreadPool::Get(NS_LITERAL_CSTRING("MediaCodecReader Audio"), 1));
NS_ENSURE_TRUE(mAudioTrack.mTaskQueue, false);
}
if (mVideoTrack.mSource != nullptr && mVideoTrack.mCodec != nullptr &&
!mVideoTrack.mTaskQueue) {
mVideoTrack.mTaskQueue = new MediaTaskQueue(
SharedThreadPool::Get(NS_LITERAL_CSTRING("MediaCodecReader Video"), 1));
NS_ENSURE_TRUE(mVideoTrack.mTaskQueue, false);
}
return true;
}
bool bool
MediaCodecReader::CreateMediaCodecs() MediaCodecReader::CreateMediaCodecs()
{ {
@ -1103,29 +1266,25 @@ MediaCodecReader::GetCodecOutputData(Track &aTrack,
{ {
// Read next frame. // Read next frame.
CodecBufferInfo info; CodecBufferInfo info;
// Try to fill more input buffers and then get one output buffer.
// FIXME: use callback from MediaCodec
status_t status = OK; status_t status = OK;
while (status == OK || status == INFO_OUTPUT_BUFFERS_CHANGED || while (status == OK || status == INFO_OUTPUT_BUFFERS_CHANGED ||
status == -EAGAIN || status == ERROR_END_OF_STREAM) { status == -EAGAIN) {
// Try to fill more input buffers and then get one output buffer.
// FIXME: use callback from MediaCodec
status = FillCodecInputData(aTrack);
int64_t duration = (int64_t)(aTimeout - TimeStamp::Now()).ToMicroseconds(); int64_t duration = (int64_t)(aTimeout - TimeStamp::Now()).ToMicroseconds();
if (!IsValidDurationUs(duration)) { if (!IsValidDurationUs(duration)) {
return -EAGAIN; return -EAGAIN;
} }
if (status == OK || status == ERROR_END_OF_STREAM) { status = aTrack.mCodec->dequeueOutputBuffer(
status = aTrack.mCodec->dequeueOutputBuffer( &info.mIndex, &info.mOffset, &info.mSize, &info.mTimeUs, &info.mFlags, duration);
&info.mIndex, &info.mOffset, &info.mSize, &info.mTimeUs, &info.mFlags, duration); // Check EOS first.
if (info.mFlags & MediaCodec::BUFFER_FLAG_EOS) { if (status == ERROR_END_OF_STREAM ||
aBuffer = info; info.mFlags & MediaCodec::BUFFER_FLAG_EOS) {
aBuffer.mBuffer = aTrack.mOutputBuffers[info.mIndex]; aBuffer = info;
return ERROR_END_OF_STREAM; aBuffer.mBuffer = aTrack.mOutputBuffers[info.mIndex];
} aTrack.mOutputEndOfStream = true;
return ERROR_END_OF_STREAM;
} }
if (status == OK) { if (status == OK) {

View File

@ -29,6 +29,8 @@ struct MediaCodec;
namespace mozilla { namespace mozilla {
class MediaTaskQueue;
class MediaCodecReader : public MediaOmxCommonReader class MediaCodecReader : public MediaOmxCommonReader
{ {
public: public:
@ -53,17 +55,15 @@ public:
// irreversible, whereas ReleaseMediaResources() is reversible. // irreversible, whereas ReleaseMediaResources() is reversible.
virtual void Shutdown(); virtual void Shutdown();
// Decodes an unspecified amount of audio data, enqueuing the audio data // Flush the MediaTaskQueue, flush MediaCodec and set the mDiscontinuity flag.
// in mAudioQueue. Returns true when there's more audio to decode, virtual nsresult ResetDecode() MOZ_OVERRIDE;
// false if the audio is finished, end of file has been reached,
// or an un-recoverable read error has occured.
virtual bool DecodeAudioData();
// Reads and decodes one video frame. Packets with a timestamp less // Dispatch a DecodeVideoFrameTask to decode video data.
// than aTimeThreshold will be decoded (unless they're not keyframes virtual void RequestVideoData(bool aSkipToNextKeyframe,
// and aKeyframeSkip is true), but will not be added to the queue. int64_t aTimeThreshold) MOZ_OVERRIDE;
virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
int64_t aTimeThreshold); // Dispatch a DecodeAudioDataTask to decode audio data.
virtual void RequestAudioData() MOZ_OVERRIDE;
virtual bool HasAudio(); virtual bool HasAudio();
virtual bool HasVideo(); virtual bool HasVideo();
@ -111,9 +111,15 @@ protected:
// playback parameters // playback parameters
CheckedUint32 mInputIndex; CheckedUint32 mInputIndex;
// mDiscontinuity, mFlushed, mInputEndOfStream, mOutputEndOfStream,
// and mSeekTimeUs aren't protected by a lock because
// mTaskQueue->Flush() will flush all tasks.
bool mInputEndOfStream; bool mInputEndOfStream;
bool mOutputEndOfStream;
int64_t mSeekTimeUs; int64_t mSeekTimeUs;
bool mFlushed; // meaningless when mSeekTimeUs is invalid. bool mFlushed; // meaningless when mSeekTimeUs is invalid.
bool mDiscontinuity;
nsRefPtr<MediaTaskQueue> mTaskQueue;
}; };
// Receive a message from MessageHandler. // Receive a message from MessageHandler.
@ -230,6 +236,24 @@ private:
void DestroyMediaCodecs(); void DestroyMediaCodecs();
static void DestroyMediaCodecs(Track &aTrack); static void DestroyMediaCodecs(Track &aTrack);
bool CreateTaskQueues();
void ShutdownTaskQueues();
bool DecodeVideoFrameTask(int64_t aTimeThreshold);
bool DecodeVideoFrameSync(int64_t aTimeThreshold);
bool DecodeAudioDataTask();
bool DecodeAudioDataSync();
void DispatchVideoTask(int64_t aTimeThreshold);
void DispatchAudioTask();
inline bool CheckVideoResources() {
return (HasVideo() && mVideoTrack.mSource != nullptr &&
mVideoTrack.mTaskQueue);
}
inline bool CheckAudioResources() {
return (HasAudio() && mAudioTrack.mSource != nullptr &&
mAudioTrack.mTaskQueue);
}
bool UpdateDuration(); bool UpdateDuration();
bool UpdateAudioInfo(); bool UpdateAudioInfo();
bool UpdateVideoInfo(); bool UpdateVideoInfo();