/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
|
|
#include "nsBuiltinDecoder.h"
|
|
|
|
#include "nsBuiltinDecoderReader.h"
|
|
|
|
#include "nsBuiltinDecoderStateMachine.h"
|
2010-04-27 01:53:44 -07:00
|
|
|
#include "VideoUtils.h"
|
2012-08-20 21:06:46 -07:00
|
|
|
#include "ImageContainer.h"
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-01-11 00:23:07 -08:00
|
|
|
#include "mozilla/mozalloc.h"
|
2012-02-29 19:56:43 -08:00
|
|
|
#include "mozilla/StandardInteger.h"
|
2012-01-11 00:23:07 -08:00
|
|
|
|
2010-05-05 19:31:02 -07:00
|
|
|
using namespace mozilla;
|
2010-05-18 20:04:33 -07:00
|
|
|
using mozilla::layers::ImageContainer;
|
|
|
|
using mozilla::layers::PlanarYCbCrImage;
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-01-27 22:36:03 -08:00
|
|
|
// Verify these values are sane. Once we've checked the frame sizes, we then
// can do less integer overflow checking.
PR_STATIC_ASSERT(MAX_VIDEO_WIDTH < PlanarYCbCrImage::MAX_DIMENSION);
PR_STATIC_ASSERT(MAX_VIDEO_HEIGHT < PlanarYCbCrImage::MAX_DIMENSION);
// MAX_DIMENSION squared must fit in a PRUint32, so width*height products of
// already-validated dimensions cannot wrap.
PR_STATIC_ASSERT(PlanarYCbCrImage::MAX_DIMENSION < PR_UINT32_MAX / PlanarYCbCrImage::MAX_DIMENSION);

// Un-comment to enable logging of seek bisections.
//#define SEEK_LOGGING

#ifdef PR_LOGGING
extern PRLogModuleInfo* gBuiltinDecoderLog;
#define LOG(type, msg) PR_LOG(gBuiltinDecoderLog, type, msg)
#ifdef SEEK_LOGGING
#define SEEK_LOG(type, msg) PR_LOG(gBuiltinDecoderLog, type, msg)
#else
// Seek logging disabled: SEEK_LOG compiles away to nothing.
#define SEEK_LOG(type, msg)
#endif
#else
// NSPR logging compiled out entirely: both macros expand to nothing.
#define LOG(type, msg)
#define SEEK_LOG(type, msg)
#endif
|
|
|
|
|
2012-04-29 20:12:42 -07:00
|
|
|
void
|
|
|
|
AudioData::EnsureAudioBuffer()
|
|
|
|
{
|
|
|
|
if (mAudioBuffer)
|
|
|
|
return;
|
|
|
|
mAudioBuffer = SharedBuffer::Create(mFrames*mChannels*sizeof(AudioDataValue));
|
|
|
|
|
|
|
|
AudioDataValue* data = static_cast<AudioDataValue*>(mAudioBuffer->Data());
|
|
|
|
for (PRUint32 i = 0; i < mFrames; ++i) {
|
|
|
|
for (PRUint32 j = 0; j < mChannels; ++j) {
|
|
|
|
data[j*mFrames + i] = mAudioData[i*mChannels + j];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2011-09-28 23:19:26 -07:00
|
|
|
static bool
|
2010-05-18 20:04:33 -07:00
|
|
|
ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane)
|
|
|
|
{
|
|
|
|
return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
|
2011-01-27 22:36:03 -08:00
|
|
|
aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
|
2010-05-18 20:04:33 -07:00
|
|
|
aPlane.mStride > 0;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2011-09-28 23:19:26 -07:00
|
|
|
bool
|
2011-01-27 22:36:03 -08:00
|
|
|
nsVideoInfo::ValidateVideoRegion(const nsIntSize& aFrame,
|
|
|
|
const nsIntRect& aPicture,
|
|
|
|
const nsIntSize& aDisplay)
|
|
|
|
{
|
|
|
|
return
|
|
|
|
aFrame.width <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aFrame.height <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aFrame.width * aFrame.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
|
|
|
|
aFrame.width * aFrame.height != 0 &&
|
|
|
|
aPicture.width <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPicture.x < PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPicture.x + aPicture.width < PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPicture.height <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPicture.y < PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPicture.y + aPicture.height < PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPicture.width * aPicture.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
|
|
|
|
aPicture.width * aPicture.height != 0 &&
|
|
|
|
aDisplay.width <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aDisplay.height <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aDisplay.width * aDisplay.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
|
|
|
|
aDisplay.width * aDisplay.height != 0;
|
|
|
|
}
|
|
|
|
|
2012-08-20 21:06:46 -07:00
|
|
|
// Constructor for duplicate frames: marks the frame as a duplicate
// (mDuplicate = true, mKeyframe = false) and attaches no image or display
// size. Times and offset are in the same units as the full constructor.
VideoData::VideoData(PRInt64 aOffset, PRInt64 aTime, PRInt64 aEndTime, PRInt64 aTimecode)
  : mOffset(aOffset),
    mTime(aTime),
    mEndTime(aEndTime),
    mTimecode(aTimecode),
    mDuplicate(true),
    mKeyframe(false)
{
  MOZ_COUNT_CTOR(VideoData);
  NS_ASSERTION(aEndTime >= aTime, "Frame must start before it ends.");
}
|
|
|
|
|
|
|
|
// Constructor for a regular (non-duplicate) decoded frame. mImage is not set
// here; VideoData::Create() assigns it after construction. aDisplay is the
// size at which the frame should be displayed.
VideoData::VideoData(PRInt64 aOffset,
                     PRInt64 aTime,
                     PRInt64 aEndTime,
                     bool aKeyframe,
                     PRInt64 aTimecode,
                     nsIntSize aDisplay)
  : mDisplay(aDisplay),
    mOffset(aOffset),
    mTime(aTime),
    mEndTime(aEndTime),
    mTimecode(aTimecode),
    mDuplicate(false),
    mKeyframe(aKeyframe)
{
  MOZ_COUNT_CTOR(VideoData);
  NS_ASSERTION(aEndTime >= aTime, "Frame must start before it ends.");
}
|
|
|
|
|
|
|
|
VideoData::~VideoData()
{
  // Balances the MOZ_COUNT_CTOR calls in both constructors for leak checking.
  MOZ_COUNT_DTOR(VideoData);
}
|
|
|
|
|
|
|
|
|
2010-05-18 20:04:33 -07:00
|
|
|
// Creates a VideoData frame holding a copy of the decoded YCbCr buffer.
// Validates the plane sizes and picture region first; returns nullptr on any
// validation failure or if an image can't be allocated from aContainer.
// The caller takes ownership of the returned VideoData.
VideoData* VideoData::Create(nsVideoInfo& aInfo,
                             ImageContainer* aContainer,
                             PRInt64 aOffset,
                             PRInt64 aTime,
                             PRInt64 aEndTime,
                             const YCbCrBuffer& aBuffer,
                             bool aKeyframe,
                             PRInt64 aTimecode,
                             nsIntRect aPicture)
{
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                         aTime,
                                         aEndTime,
                                         aKeyframe,
                                         aTimecode,
                                         aInfo.mDisplay));
    return v.forget();
  }

  // The following situation should never happen unless there is a bug
  // in the decoder
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    NS_ERROR("C planes with different sizes");
    return nullptr;
  }

  // The following situations could be triggered by invalid input
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    NS_WARNING("Empty picture rect");
    return nullptr;
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) || !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    NS_WARNING("Invalid plane size");
    return nullptr;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  // CheckedUint32 catches arithmetic overflow in the offset+extent sums.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  // x is bounded by the Y plane's stride (its row allocation), y by its height.
  if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight)
  {
    // The specified picture dimensions can't be contained inside the video
    // frame, we'll stomp memory if we try to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return nullptr;
  }

  nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                       aTime,
                                       aEndTime,
                                       aKeyframe,
                                       aTimecode,
                                       aInfo.mDisplay));
  // Currently our decoder only knows how to output to PLANAR_YCBCR
  // format.
  ImageFormat format = PLANAR_YCBCR;
  v->mImage = aContainer->CreateImage(&format, 1);
  if (!v->mImage) {
    return nullptr;
  }
  NS_ASSERTION(v->mImage->GetFormat() == PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());

  // Describe the three planes for the image; CopyData below copies the
  // pixels out of aBuffer, so aBuffer need not outlive the returned frame.
  PlanarYCbCrImage::Data data;
  const YCbCrBuffer::Plane &Y = aBuffer.mPlanes[0];
  const YCbCrBuffer::Plane &Cb = aBuffer.mPlanes[1];
  const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];

  data.mYChannel = Y.mData;
  data.mYSize = gfxIntSize(Y.mWidth, Y.mHeight);
  data.mYStride = Y.mStride;
  data.mYOffset = Y.mOffset;
  data.mYSkip = Y.mSkip;
  data.mCbChannel = Cb.mData;
  data.mCrChannel = Cr.mData;
  // Cb/Cr were verified above to have identical dimensions, so one size and
  // stride describes both chroma planes.
  data.mCbCrSize = gfxIntSize(Cb.mWidth, Cb.mHeight);
  data.mCbCrStride = Cb.mStride;
  data.mCbOffset = Cb.mOffset;
  data.mCbSkip = Cb.mSkip;
  data.mCrOffset = Cr.mOffset;
  data.mCrSkip = Cr.mSkip;
  data.mPicX = aPicture.x;
  data.mPicY = aPicture.y;
  data.mPicSize = gfxIntSize(aPicture.width, aPicture.height);
  data.mStereoMode = aInfo.mStereoMode;

  videoImage->CopyData(data);
  return v.forget();
}
|
|
|
|
|
2012-08-20 21:06:46 -07:00
|
|
|
// Queue-enumeration functor: adds the image data size of each queued
// VideoData to mResult. Duplicate frames carry no image and contribute
// nothing. Always returns nullptr (the enumeration continues regardless).
void* nsBuiltinDecoderReader::VideoQueueMemoryFunctor::operator()(void* anObject) {
  const VideoData* frame = static_cast<const VideoData*>(anObject);
  if (frame->mImage) {
    NS_ASSERTION(frame->mImage->GetFormat() == mozilla::ImageFormat::PLANAR_YCBCR,
                 "Wrong format?");
    mozilla::layers::PlanarYCbCrImage* image =
      static_cast<mozilla::layers::PlanarYCbCrImage*>(frame->mImage.get());
    mResult += image->GetDataSize();
  }
  return nullptr;
}
|
|
|
|
|
2010-05-05 19:31:02 -07:00
|
|
|
// Constructs a reader bound to aDecoder.
// NOTE(review): mDecoder is stored as a raw pointer — assumed non-owning;
// confirm the decoder outlives the reader.
nsBuiltinDecoderReader::nsBuiltinDecoderReader(nsBuiltinDecoder* aDecoder)
  : mDecoder(aDecoder)
{
  MOZ_COUNT_CTOR(nsBuiltinDecoderReader);
}
|
|
|
|
|
2010-05-05 19:31:02 -07:00
|
|
|
nsBuiltinDecoderReader::~nsBuiltinDecoderReader()
{
  // Drop any decoded data still sitting in the audio/video queues.
  ResetDecode();
  MOZ_COUNT_DTOR(nsBuiltinDecoderReader);
}
|
|
|
|
|
2010-05-05 19:31:02 -07:00
|
|
|
// Discards all queued but not-yet-consumed decoded audio and video data.
// Always succeeds.
nsresult nsBuiltinDecoderReader::ResetDecode()
{
  mVideoQueue.Reset();
  mAudioQueue.Reset();
  return NS_OK;
}
|
|
|
|
|
2011-05-08 14:10:28 -07:00
|
|
|
// Decodes forward until the first sample of each available stream is found,
// and writes the earlier of the two start times to aOutStartTime (left
// untouched if neither stream produced data). Returns the first video frame,
// still owned by mVideoQueue, or nullptr if there is none.
VideoData* nsBuiltinDecoderReader::FindStartTime(PRInt64& aOutStartTime)
{
  NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
               "Should be on state machine or decode thread.");

  // Extract the start times of the bitstreams in order to calculate
  // the duration.
  // INT64_MAX sentinels mean "stream absent or produced no data".
  PRInt64 videoStartTime = INT64_MAX;
  PRInt64 audioStartTime = INT64_MAX;
  VideoData* videoData = nullptr;

  if (HasVideo()) {
    videoData = DecodeToFirstData(&nsBuiltinDecoderReader::DecodeVideoFrame,
                                  mVideoQueue);
    if (videoData) {
      videoStartTime = videoData->mTime;
    }
  }
  if (HasAudio()) {
    AudioData* audioData = DecodeToFirstData(&nsBuiltinDecoderReader::DecodeAudioData,
                                             mAudioQueue);
    if (audioData) {
      audioStartTime = audioData->mTime;
    }
  }

  // Only report a start time if at least one stream yielded data.
  PRInt64 startTime = NS_MIN(videoStartTime, audioStartTime);
  if (startTime != INT64_MAX) {
    aOutStartTime = startTime;
  }

  return videoData;
}
|
|
|
|
|
2010-05-05 19:31:02 -07:00
|
|
|
template<class Data>
|
|
|
|
Data* nsBuiltinDecoderReader::DecodeToFirstData(DecodeFn aDecodeFn,
|
|
|
|
MediaQueue<Data>& aQueue)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-09-28 23:19:26 -07:00
|
|
|
bool eof = false;
|
2010-05-05 19:31:02 -07:00
|
|
|
while (!eof && aQueue.GetSize() == 0) {
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-04-29 12:21:57 -07:00
|
|
|
ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
|
2010-05-05 19:31:02 -07:00
|
|
|
if (mDecoder->GetDecodeState() == nsDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
|
2012-07-30 07:20:58 -07:00
|
|
|
return nullptr;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
}
|
2010-05-05 19:31:02 -07:00
|
|
|
eof = !(this->*aDecodeFn)();
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
2012-07-30 07:20:58 -07:00
|
|
|
Data* d = nullptr;
|
|
|
|
return (d = aQueue.PeekFront()) ? d : nullptr;
|
2010-05-05 19:31:02 -07:00
|
|
|
}
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2010-08-12 19:28:15 -07:00
|
|
|
// Decodes forward from the current decode position up to the seek target
// aTarget: video frames that end at or before the target are discarded
// (keeping the last one if we hit end of file), and the audio block that
// contains the target is trimmed so playback begins exactly at the target.
// Returns NS_ERROR_FAILURE on decoder shutdown or arithmetic overflow,
// NS_OK otherwise.
nsresult nsBuiltinDecoderReader::DecodeToTarget(PRInt64 aTarget)
{
  // Decode forward to the target frame. Start with video, if we have it.
  if (HasVideo()) {
    bool eof = false;
    PRInt64 startTime = -1;
    // Holds the most recently popped frame so it can be re-pushed if we hit
    // end of file (see below). Ownership is carefully juggled with the queue.
    nsAutoPtr<VideoData> video;
    while (HasVideo() && !eof) {
      while (mVideoQueue.GetSize() == 0 && !eof) {
        bool skip = false;
        eof = !DecodeVideoFrame(skip, 0);
        {
          ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
          if (mDecoder->GetDecodeState() == nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
            return NS_ERROR_FAILURE;
          }
        }
      }
      if (mVideoQueue.GetSize() == 0) {
        // Hit end of file, we want to display the last frame of the video.
        if (video) {
          mVideoQueue.PushFront(video.forget());
        }
        break;
      }
      // |video| takes ownership of the front frame while the queue still
      // references it; PopFront() below makes that ownership exclusive, and
      // the else-branch forget() relinquishes it back to the queue.
      video = mVideoQueue.PeekFront();
      // If the frame end time is less than the seek target, we won't want
      // to display this frame after the seek, so discard it.
      if (video && video->mEndTime <= aTarget) {
        if (startTime == -1) {
          startTime = video->mTime;
        }
        mVideoQueue.PopFront();
      } else {
        video.forget();
        break;
      }
    }
    {
      ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
      if (mDecoder->GetDecodeState() == nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
        return NS_ERROR_FAILURE;
      }
    }
    LOG(PR_LOG_DEBUG, ("First video frame after decode is %lld", startTime));
  }

  if (HasAudio()) {
    // Decode audio forward to the seek target.
    bool eof = false;
    while (HasAudio() && !eof) {
      while (!eof && mAudioQueue.GetSize() == 0) {
        eof = !DecodeAudioData();
        {
          ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
          if (mDecoder->GetDecodeState() == nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
            return NS_ERROR_FAILURE;
          }
        }
      }
      const AudioData* audio = mAudioQueue.PeekFront();
      if (!audio)
        break;
      // Convert the block's start time and the target to frame counts;
      // CheckedInt64 catches overflow in the usecs->frames conversion.
      CheckedInt64 startFrame = UsecsToFrames(audio->mTime, mInfo.mAudioRate);
      CheckedInt64 targetFrame = UsecsToFrames(aTarget, mInfo.mAudioRate);
      if (!startFrame.isValid() || !targetFrame.isValid()) {
        return NS_ERROR_FAILURE;
      }
      if (startFrame.value() + audio->mFrames <= targetFrame.value()) {
        // Our seek target lies after the frames in this AudioData. Pop it
        // off the queue, and keep decoding forwards.
        delete mAudioQueue.PopFront();
        audio = nullptr;
        continue;
      }
      if (startFrame.value() > targetFrame.value()) {
        // The seek target doesn't lie in the audio block just after the last
        // audio frames we've seen which were before the seek target. This
        // could have been the first audio data we've seen after seek, i.e. the
        // seek terminated after the seek target in the audio stream. Just
        // abort the audio decode-to-target, the state machine will play
        // silence to cover the gap. Typically this happens in poorly muxed
        // files.
        NS_WARNING("Audio not synced after seek, maybe a poorly muxed file?");
        break;
      }

      // The seek target lies somewhere in this AudioData's frames, strip off
      // any frames which lie before the seek target, so we'll begin playback
      // exactly at the seek target.
      NS_ASSERTION(targetFrame.value() >= startFrame.value(),
                   "Target must at or be after data start.");
      NS_ASSERTION(targetFrame.value() < startFrame.value() + audio->mFrames,
                   "Data must end after target.");

      PRInt64 framesToPrune = targetFrame.value() - startFrame.value();
      if (framesToPrune > audio->mFrames) {
        // We've messed up somehow. Don't try to trim frames, the |frames|
        // variable below will overflow.
        NS_WARNING("Can't prune more frames that we have!");
        break;
      }
      // Copy the frames at and after the target into a fresh buffer, and
      // replace the front of the queue with a trimmed AudioData starting
      // exactly at aTarget.
      PRUint32 frames = audio->mFrames - static_cast<PRUint32>(framesToPrune);
      PRUint32 channels = audio->mChannels;
      nsAutoArrayPtr<AudioDataValue> audioData(new AudioDataValue[frames * channels]);
      memcpy(audioData.get(),
             audio->mAudioData.get() + (framesToPrune * channels),
             frames * channels * sizeof(AudioDataValue));
      CheckedInt64 duration = FramesToUsecs(frames, mInfo.mAudioRate);
      if (!duration.isValid()) {
        return NS_ERROR_FAILURE;
      }
      nsAutoPtr<AudioData> data(new AudioData(audio->mOffset,
                                              aTarget,
                                              duration.value(),
                                              frames,
                                              audioData.forget(),
                                              channels));
      delete mAudioQueue.PopFront();
      mAudioQueue.PushFront(data.forget());
      break;
    }
  }
  return NS_OK;
}
|
|
|
|
|
2010-04-01 20:03:07 -07:00
|
|
|
|