/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "nsError.h"
#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "MediaResource.h"
#include "WebMReader.h"
#include "WebMBufferedParser.h"
#include "VideoUtils.h"
#include "nsTimeRanges.h"
#include "VorbisUtils.h"

#define VPX_DONT_DEFINE_STDINT_TYPES
#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"

using mozilla::NesteggPacketHolder;
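
// Traits specialization so nsAutoRef/nsReturnRef can own NesteggPacketHolder
// pointers; releasing a reference simply deletes the holder.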
template <>
class nsAutoRefTraits<NesteggPacketHolder> : public nsPointerRefTraits<NesteggPacketHolder>
{
public:
  static void Release(NesteggPacketHolder* aHolder) { delete aHolder; }
};

namespace mozilla {

using namespace layers;

// Un-comment to enable logging of seek bisections.
//#define SEEK_LOGGING

#ifdef PR_LOGGING
extern PRLogModuleInfo* gMediaDecoderLog;
PRLogModuleInfo* gNesteggLog;
#define LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
#ifdef SEEK_LOGGING
#define SEEK_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
#else
#define SEEK_LOG(type, msg)
#endif
#else
#define LOG(type, msg)
#define SEEK_LOG(type, msg)
#endif

static const unsigned NS_PER_USEC = 1000;
static const double NS_PER_S = 1e9;

// If a seek request is within SEEK_DECODE_MARGIN microseconds of the
// current time, decode ahead from the current frame rather than performing
// a full seek.
static const int SEEK_DECODE_MARGIN = 250000;

// Functions for reading and seeking using MediaResource required for
// nestegg_io. The 'user data' passed to these functions is the
// decoder from which the media resource is obtained.
static int webm_read(void *aBuffer, size_t aLength, void *aUserData)
{
  NS_ASSERTION(aUserData, "aUserData must point to a valid AbstractMediaDecoder");
  AbstractMediaDecoder* decoder = reinterpret_cast<AbstractMediaDecoder*>(aUserData);
  MediaResource* resource = decoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  nsresult rv = NS_OK;
  bool eof = false;

  char *p = static_cast<char *>(aBuffer);
  while (NS_SUCCEEDED(rv) && aLength > 0) {
    uint32_t bytes = 0;
    rv = resource->Read(p, aLength, &bytes);
    if (bytes == 0) {
      eof = true;
      break;
    }
    decoder->NotifyBytesConsumed(bytes);
    aLength -= bytes;
    p += bytes;
  }

  return NS_FAILED(rv) ? -1 : eof ? 0 : 1;
}

static int webm_seek(int64_t aOffset, int aWhence, void *aUserData)
{
  NS_ASSERTION(aUserData, "aUserData must point to a valid AbstractMediaDecoder");
  AbstractMediaDecoder* decoder = reinterpret_cast<AbstractMediaDecoder*>(aUserData);
  MediaResource* resource = decoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  nsresult rv = resource->Seek(aWhence, aOffset);
  return NS_SUCCEEDED(rv) ? 0 : -1;
}

static int64_t webm_tell(void *aUserData)
{
  NS_ASSERTION(aUserData, "aUserData must point to a valid AbstractMediaDecoder");
  AbstractMediaDecoder* decoder = reinterpret_cast<AbstractMediaDecoder*>(aUserData);
  MediaResource* resource = decoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  return resource->Tell();
}
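
// Logging callback passed to nestegg_init(). Formats nestegg parser messages
// with a severity tag and writes them to the gNesteggLog module.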
static void webm_log(nestegg * context,
                     unsigned int severity,
                     char const * format, ...)
{
#ifdef PR_LOGGING
  va_list args;
  char msg[256];
  const char * sevStr;

  switch(severity) {
    case NESTEGG_LOG_DEBUG:
      sevStr = "DBG";
      break;
    case NESTEGG_LOG_INFO:
      sevStr = "INF";
      break;
    case NESTEGG_LOG_WARNING:
      sevStr = "WRN";
      break;
    case NESTEGG_LOG_ERROR:
      sevStr = "ERR";
      break;
    case NESTEGG_LOG_CRITICAL:
      sevStr = "CRT";
      break;
    default:
      sevStr = "UNK";
      break;
  }

  va_start(args, format);

  PR_snprintf(msg, sizeof(msg), "%p [Nestegg-%s] ", context, sevStr);
  PR_vsnprintf(msg+strlen(msg), sizeof(msg)-strlen(msg), format, args);
  PR_LOG(gNesteggLog, PR_LOG_DEBUG, (msg));

  va_end(args);
#endif
}

WebMReader::WebMReader(AbstractMediaDecoder* aDecoder)
#ifdef MOZ_DASH
  : DASHRepReader(aDecoder),
#else
  : MediaDecoderReader(aDecoder),
#endif
  mContext(nullptr),
  mPacketCount(0),
  mChannels(0),
  mVideoTrack(0),
  mAudioTrack(0),
  mAudioStartUsec(-1),
  mAudioFrames(0),
  mHasVideo(false),
  mHasAudio(false)
#ifdef MOZ_DASH
  , mMainReader(nullptr),
  mSwitchingCluster(-1),
  mNextReader(nullptr),
  mSeekToCluster(-1),
  mCurrentOffset(-1),
  mPushVideoPacketToNextReader(false),
  mReachedSwitchAccessPoint(false)
#endif
{
  MOZ_COUNT_CTOR(WebMReader);
#ifdef PR_LOGGING
  if (!gNesteggLog) {
    gNesteggLog = PR_NewLogModule("Nestegg");
  }
#endif
  // Zero these member vars to avoid crashes in VP8 destroy and Vorbis clear
  // functions when destructor is called before |Init|.
  memset(&mVP8, 0, sizeof(vpx_codec_ctx_t));
  memset(&mVorbisBlock, 0, sizeof(vorbis_block));
  memset(&mVorbisDsp, 0, sizeof(vorbis_dsp_state));
  memset(&mVorbisInfo, 0, sizeof(vorbis_info));
  memset(&mVorbisComment, 0, sizeof(vorbis_comment));
}

WebMReader::~WebMReader()
{
  Cleanup();

  mVideoPackets.Reset();
  mAudioPackets.Reset();

  vpx_codec_destroy(&mVP8);

  vorbis_block_clear(&mVorbisBlock);
  vorbis_dsp_clear(&mVorbisDsp);
  vorbis_info_clear(&mVorbisInfo);
  vorbis_comment_clear(&mVorbisComment);

  MOZ_COUNT_DTOR(WebMReader);
}
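
// Initializes the VP8 and Vorbis decoder state. When cloning from another
// reader, the buffered-range parser state is shared with the donor.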
nsresult WebMReader::Init(MediaDecoderReader* aCloneDonor)
{
  if (vpx_codec_dec_init(&mVP8, vpx_codec_vp8_dx(), NULL, 0)) {
    return NS_ERROR_FAILURE;
  }

  vorbis_info_init(&mVorbisInfo);
  vorbis_comment_init(&mVorbisComment);
  memset(&mVorbisDsp, 0, sizeof(vorbis_dsp_state));
  memset(&mVorbisBlock, 0, sizeof(vorbis_block));

  if (aCloneDonor) {
    mBufferedState = static_cast<WebMReader*>(aCloneDonor)->mBufferedState;
  } else {
    mBufferedState = new WebMBufferedState;
  }

  return NS_OK;
}
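
// Discards per-stream decode state (audio timestamps, Vorbis synthesis state,
// queued packets) so that decoding can restart cleanly, e.g. after a seek.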
nsresult WebMReader::ResetDecode()
{
  mAudioFrames = 0;
  mAudioStartUsec = -1;
  nsresult res = NS_OK;
  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  // Ignore failed results from vorbis_synthesis_restart. They
  // aren't fatal and it fails when ResetDecode is called at a
  // time when no vorbis data has been read.
  vorbis_synthesis_restart(&mVorbisDsp);

  mVideoPackets.Reset();
  mAudioPackets.Reset();

#ifdef MOZ_DASH
  LOG(PR_LOG_DEBUG, ("Resetting DASH seek vars"));
  mSwitchingCluster = -1;
  mNextReader = nullptr;
  mSeekToCluster = -1;
  mCurrentOffset = -1;
  mPushVideoPacketToNextReader = false;
  mReachedSwitchAccessPoint = false;
#endif

  return res;
}

void WebMReader::Cleanup()
{
  if (mContext) {
    nestegg_destroy(mContext);
    mContext = nullptr;
  }
}
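
// Parses the WebM headers via nestegg, records the media duration, and fills
// in mInfo with the video (VP8) and audio (Vorbis) track parameters.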
nsresult WebMReader::ReadMetadata(VideoInfo* aInfo,
                                  MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  LOG(PR_LOG_DEBUG, ("Reader [%p] for Decoder [%p]: Reading WebM Metadata: "
                     "init bytes [%d - %d] cues bytes [%d - %d]",
                     this, mDecoder,
                     mInitByteRange.mStart, mInitByteRange.mEnd,
                     mCuesByteRange.mStart, mCuesByteRange.mEnd));
  nestegg_io io;
  io.read = webm_read;
  io.seek = webm_seek;
  io.tell = webm_tell;
  io.userdata = mDecoder;
#ifdef MOZ_DASH
  int64_t maxOffset = mInitByteRange.IsNull() ? -1 : mInitByteRange.mEnd;
#else
  int64_t maxOffset = -1;
#endif
  int r = nestegg_init(&mContext, io, &webm_log, maxOffset);
  if (r == -1) {
    return NS_ERROR_FAILURE;
  }

  uint64_t duration = 0;
  r = nestegg_duration(mContext, &duration);
  if (r == 0) {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(duration / NS_PER_USEC);
  }

  unsigned int ntracks = 0;
  r = nestegg_track_count(mContext, &ntracks);
  if (r == -1) {
    Cleanup();
    return NS_ERROR_FAILURE;
  }

  mInfo.mHasAudio = false;
  mInfo.mHasVideo = false;
  for (uint32_t track = 0; track < ntracks; ++track) {
    int id = nestegg_track_codec_id(mContext, track);
    if (id == -1) {
      Cleanup();
      return NS_ERROR_FAILURE;
    }
    int type = nestegg_track_type(mContext, track);
    if (!mHasVideo && type == NESTEGG_TRACK_VIDEO) {
      nestegg_video_params params;
      r = nestegg_track_video_params(mContext, track, &params);
      if (r == -1) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      // Picture region, taking into account cropping, before scaling
      // to the display size.
      nsIntRect pictureRect(params.crop_left,
                            params.crop_top,
                            params.width - (params.crop_right + params.crop_left),
                            params.height - (params.crop_bottom + params.crop_top));

      // If the cropping data appears invalid then use the frame data
      if (pictureRect.width <= 0 ||
          pictureRect.height <= 0 ||
          pictureRect.x < 0 ||
          pictureRect.y < 0)
      {
        pictureRect.x = 0;
        pictureRect.y = 0;
        pictureRect.width = params.width;
        pictureRect.height = params.height;
      }

      // Validate the container-reported frame and pictureRect sizes. This ensures
      // that our video frame creation code doesn't overflow.
      nsIntSize displaySize(params.display_width, params.display_height);
      nsIntSize frameSize(params.width, params.height);
      if (!VideoInfo::ValidateVideoRegion(frameSize, pictureRect, displaySize)) {
        // Video track's frame sizes will overflow. Ignore the video track.
        continue;
      }

      mVideoTrack = track;
      mHasVideo = true;
      mInfo.mHasVideo = true;

      mInfo.mDisplay = displaySize;
      mPicture = pictureRect;
      mInitialFrame = frameSize;

      switch (params.stereo_mode) {
      case NESTEGG_VIDEO_MONO:
        mInfo.mStereoMode = STEREO_MODE_MONO;
        break;
      case NESTEGG_VIDEO_STEREO_LEFT_RIGHT:
        mInfo.mStereoMode = STEREO_MODE_LEFT_RIGHT;
        break;
      case NESTEGG_VIDEO_STEREO_BOTTOM_TOP:
        mInfo.mStereoMode = STEREO_MODE_BOTTOM_TOP;
        break;
      case NESTEGG_VIDEO_STEREO_TOP_BOTTOM:
        mInfo.mStereoMode = STEREO_MODE_TOP_BOTTOM;
        break;
      case NESTEGG_VIDEO_STEREO_RIGHT_LEFT:
        mInfo.mStereoMode = STEREO_MODE_RIGHT_LEFT;
        break;
      }
    }
    else if (!mHasAudio && type == NESTEGG_TRACK_AUDIO) {
      nestegg_audio_params params;
      r = nestegg_track_audio_params(mContext, track, &params);
      if (r == -1) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      mAudioTrack = track;
      mHasAudio = true;
      mInfo.mHasAudio = true;

      // Get the Vorbis header data
      unsigned int nheaders = 0;
      r = nestegg_track_codec_data_count(mContext, track, &nheaders);
      if (r == -1 || nheaders != 3) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      for (uint32_t header = 0; header < nheaders; ++header) {
        unsigned char* data = 0;
        size_t length = 0;

        r = nestegg_track_codec_data(mContext, track, header, &data, &length);
        if (r == -1) {
          Cleanup();
          return NS_ERROR_FAILURE;
        }

        ogg_packet opacket = InitOggPacket(data, length, header == 0, false, 0);

        r = vorbis_synthesis_headerin(&mVorbisInfo,
                                      &mVorbisComment,
                                      &opacket);
        if (r != 0) {
          Cleanup();
          return NS_ERROR_FAILURE;
        }
      }

      r = vorbis_synthesis_init(&mVorbisDsp, &mVorbisInfo);
      if (r != 0) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      r = vorbis_block_init(&mVorbisDsp, &mVorbisBlock);
      if (r != 0) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      mInfo.mAudioRate = mVorbisDsp.vi->rate;
      mInfo.mAudioChannels = mVorbisDsp.vi->channels;
      mChannels = mInfo.mAudioChannels;
    }
  }

#ifdef MOZ_DASH
  // Byte range for cues has been specified; load them.
  if (!mCuesByteRange.IsNull()) {
    maxOffset = mCuesByteRange.mEnd;

    // Iterate through cluster ranges until nestegg returns the last one
    NS_ENSURE_TRUE(mClusterByteRanges.IsEmpty(),
                   NS_ERROR_ALREADY_INITIALIZED);
    int clusterNum = 0;
    bool done = false;
    uint64_t timestamp;
    do {
      mClusterByteRanges.AppendElement();
      r = nestegg_get_cue_point(mContext, clusterNum, maxOffset,
                                &(mClusterByteRanges[clusterNum].mStart),
                                &(mClusterByteRanges[clusterNum].mEnd),
                                &timestamp);
      if (r != 0) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }
      LOG(PR_LOG_DEBUG, ("Reader [%p] for Decoder [%p]: Cluster [%d]: "
                         "start [%lld] end [%lld], timestamp [%.2llfs]",
                         this, mDecoder, clusterNum,
                         mClusterByteRanges[clusterNum].mStart,
                         mClusterByteRanges[clusterNum].mEnd,
                         timestamp/NS_PER_S));
      mClusterByteRanges[clusterNum].mStartTime = timestamp/NS_PER_USEC;
      // Last cluster will have '-1' as end value
      if (mClusterByteRanges[clusterNum].mEnd == -1) {
        mClusterByteRanges[clusterNum].mEnd = (mCuesByteRange.mStart-1);
        done = true;
      } else {
        clusterNum++;
      }
    } while (!done);
  }
#endif

  // We can't seek in buffered regions if we have no cues.
  bool haveCues;
  int64_t dummy = -1;
  haveCues = nestegg_get_cue_point(mContext, 0, -1, &dummy, &dummy,
                                   (uint64_t*)&dummy) == 0;
  mDecoder->SetMediaSeekable(haveCues);

  *aInfo = mInfo;

  *aTags = nullptr;

#ifdef MOZ_DASH
  mDecoder->OnReadMetadataCompleted();
#endif

  return NS_OK;
}
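
// Wraps raw codec data in an ogg_packet so it can be fed to the Vorbis
// synthesis API; packet numbers are taken from mPacketCount.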
ogg_packet WebMReader::InitOggPacket(unsigned char* aData,
                                     size_t aLength,
                                     bool aBOS,
                                     bool aEOS,
                                     int64_t aGranulepos)
{
  ogg_packet packet;
  packet.packet = aData;
  packet.bytes = aLength;
  packet.b_o_s = aBOS;
  packet.e_o_s = aEOS;
  packet.granulepos = aGranulepos;
  packet.packetno = mPacketCount++;
  return packet;
}
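
// Decodes every Vorbis chunk in aPacket and pushes the resulting frames onto
// the audio queue, tracking timestamp gaps between consecutive packets.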
bool WebMReader::DecodeAudioPacket(nestegg_packet* aPacket, int64_t aOffset)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  int r = 0;
  unsigned int count = 0;
  r = nestegg_packet_count(aPacket, &count);
  if (r == -1) {
    return false;
  }

  uint64_t tstamp = 0;
  r = nestegg_packet_tstamp(aPacket, &tstamp);
  if (r == -1) {
    return false;
  }

  const uint32_t rate = mVorbisDsp.vi->rate;
  uint64_t tstamp_usecs = tstamp / NS_PER_USEC;
  if (mAudioStartUsec == -1) {
    // This is the first audio chunk. Assume the start time of our decode
    // is the start of this chunk.
    mAudioStartUsec = tstamp_usecs;
  }
  // If there's a gap between the start of this audio chunk and the end of
  // the previous audio chunk, we need to increment the packet count so that
  // the vorbis decode doesn't use data from before the gap to help decode
  // from after the gap.
  CheckedInt64 tstamp_frames = UsecsToFrames(tstamp_usecs, rate);
  CheckedInt64 decoded_frames = UsecsToFrames(mAudioStartUsec, rate);
  if (!tstamp_frames.isValid() || !decoded_frames.isValid()) {
    NS_WARNING("Int overflow converting WebM times to frames");
    return false;
  }
  decoded_frames += mAudioFrames;
  if (!decoded_frames.isValid()) {
    NS_WARNING("Int overflow adding decoded_frames");
    return false;
  }
  if (tstamp_frames.value() > decoded_frames.value()) {
#ifdef DEBUG
    CheckedInt64 usecs = FramesToUsecs(tstamp_frames.value() - decoded_frames.value(), rate);
    LOG(PR_LOG_DEBUG, ("WebMReader detected gap of %lld, %lld frames, in audio stream\n",
                       usecs.isValid() ? usecs.value() : -1,
                       tstamp_frames.value() - decoded_frames.value()));
#endif
    mPacketCount++;
    mAudioStartUsec = tstamp_usecs;
    mAudioFrames = 0;
  }

  int32_t total_frames = 0;
  for (uint32_t i = 0; i < count; ++i) {
    unsigned char* data;
    size_t length;
    r = nestegg_packet_data(aPacket, i, &data, &length);
    if (r == -1) {
      return false;
    }

    ogg_packet opacket = InitOggPacket(data, length, false, false, -1);

    if (vorbis_synthesis(&mVorbisBlock, &opacket) != 0) {
      return false;
    }

    if (vorbis_synthesis_blockin(&mVorbisDsp,
                                 &mVorbisBlock) != 0) {
      return false;
    }

    VorbisPCMValue** pcm = 0;
    int32_t frames = 0;
    while ((frames = vorbis_synthesis_pcmout(&mVorbisDsp, &pcm)) > 0) {
      nsAutoArrayPtr<AudioDataValue> buffer(new AudioDataValue[frames * mChannels]);
      for (uint32_t j = 0; j < mChannels; ++j) {
        VorbisPCMValue* channel = pcm[j];
        for (uint32_t i = 0; i < uint32_t(frames); ++i) {
          buffer[i*mChannels + j] = MOZ_CONVERT_VORBIS_SAMPLE(channel[i]);
        }
      }

      CheckedInt64 duration = FramesToUsecs(frames, rate);
      if (!duration.isValid()) {
        NS_WARNING("Int overflow converting WebM audio duration");
        return false;
      }
      CheckedInt64 total_duration = FramesToUsecs(total_frames, rate);
      if (!total_duration.isValid()) {
        NS_WARNING("Int overflow converting WebM audio total_duration");
        return false;
      }

      CheckedInt64 time = total_duration + tstamp_usecs;
      if (!time.isValid()) {
        NS_WARNING("Int overflow adding total_duration and tstamp_usecs");
        nestegg_free_packet(aPacket);
        return false;
      }

      total_frames += frames;
      AudioQueue().Push(new AudioData(aOffset,
                                      time.value(),
                                      duration.value(),
                                      frames,
                                      buffer.forget(),
                                      mChannels));
      mAudioFrames += frames;
      if (vorbis_synthesis_read(&mVorbisDsp, frames) != 0) {
        return false;
      }
    }
  }

  return true;
}
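
// Returns the next packet for the requested track, buffering packets that
// belong to the other track so they are not lost.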
nsReturnRef<NesteggPacketHolder> WebMReader::NextPacket(TrackType aTrackType)
#ifdef MOZ_DASH
{
  nsAutoRef<NesteggPacketHolder> holder;
  // Get packet from next reader if we're at a switching point; most likely we
  // did not download the next packet for this reader's stream, so we have to
  // get it from the next one. Note: Switch to next reader only for video;
  // audio switching is not supported in the DASH-WebM On Demand profile.
  if (aTrackType == VIDEO &&
      (uint32_t)mSwitchingCluster < mClusterByteRanges.Length() &&
      mCurrentOffset == mClusterByteRanges[mSwitchingCluster].mStart) {

    if (mVideoPackets.GetSize() > 0) {
      holder = NextPacketInternal(VIDEO);
      LOG(PR_LOG_DEBUG,
          ("WebMReader[%p] got packet from mVideoPackets @[%lld]",
           this, holder->mOffset));
    } else {
      mReachedSwitchAccessPoint = true;
      NS_ASSERTION(mNextReader,
                   "Stream switch has been requested but mNextReader is null");
      holder = mNextReader->NextPacket(aTrackType);
      mPushVideoPacketToNextReader = true;
      // Reset for possible future switches.
      mSwitchingCluster = -1;
      LOG(PR_LOG_DEBUG,
          ("WebMReader[%p] got packet from mNextReader[%p] @[%lld]",
           this, mNextReader.get(), (holder ? holder->mOffset : 0)));
    }
  } else {
    holder = NextPacketInternal(aTrackType);
    if (holder) {
      mCurrentOffset = holder->mOffset;
    }
  }
  return holder.out();
}

nsReturnRef<NesteggPacketHolder>
WebMReader::NextPacketInternal(TrackType aTrackType)
#endif
{
  // The packet queue that packets will be pushed on if they
  // are not the type we are interested in.
  WebMPacketQueue& otherPackets =
    aTrackType == VIDEO ? mAudioPackets : mVideoPackets;

  // The packet queue for the type that we are interested in.
  WebMPacketQueue &packets =
    aTrackType == VIDEO ? mVideoPackets : mAudioPackets;

  // Flag to indicate that we do need to playback these types of
  // packets.
  bool hasType = aTrackType == VIDEO ? mHasVideo : mHasAudio;

  // Flag to indicate that we do need to playback the other type
  // of track.
  bool hasOtherType = aTrackType == VIDEO ? mHasAudio : mHasVideo;

  // Track we are interested in
  uint32_t ourTrack = aTrackType == VIDEO ? mVideoTrack : mAudioTrack;

  // Value of other track
  uint32_t otherTrack = aTrackType == VIDEO ? mAudioTrack : mVideoTrack;

  nsAutoRef<NesteggPacketHolder> holder;

  if (packets.GetSize() > 0) {
    holder.own(packets.PopFront());
  } else {
    // Keep reading packets until we find a packet
    // for the track we want.
    do {
      nestegg_packet* packet;
      int r = nestegg_read_packet(mContext, &packet);
      if (r <= 0) {
        return nsReturnRef<NesteggPacketHolder>();
      }
      int64_t offset = mDecoder->GetResource()->Tell();
      holder.own(new NesteggPacketHolder(packet, offset));

      unsigned int track = 0;
      r = nestegg_packet_track(packet, &track);
      if (r == -1) {
        return nsReturnRef<NesteggPacketHolder>();
      }

      if (hasOtherType && otherTrack == track) {
        // Save the packet for when we want these packets
        otherPackets.Push(holder.disown());
        continue;
      }

      // The packet is for the track we want to play
      if (hasType && ourTrack == track) {
        break;
      }
    } while (true);
  }

  return holder.out();
}
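
// Pulls the next audio packet and decodes it; finishes the audio queue when
// no more packets are available.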
bool WebMReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  nsAutoRef<NesteggPacketHolder> holder(NextPacket(AUDIO));
  if (!holder) {
    AudioQueue().Finish();
    return false;
  }

  return DecodeAudioPacket(holder->mPacket, holder->mOffset);
}
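
// Decodes the next video packet with libvpx and pushes the resulting frames
// onto the video queue, optionally skipping forward to the next keyframe.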
bool WebMReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                  int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  nsAutoRef<NesteggPacketHolder> holder(NextPacket(VIDEO));
  if (!holder) {
    VideoQueue().Finish();
    return false;
  }

  nestegg_packet* packet = holder->mPacket;
  unsigned int track = 0;
  int r = nestegg_packet_track(packet, &track);
  if (r == -1) {
    return false;
  }

  unsigned int count = 0;
  r = nestegg_packet_count(packet, &count);
  if (r == -1) {
    return false;
  }

  uint64_t tstamp = 0;
  r = nestegg_packet_tstamp(packet, &tstamp);
  if (r == -1) {
    return false;
  }

  // The end time of this frame is the start time of the next frame. Fetch
  // the timestamp of the next packet for this track. If we've reached the
  // end of the resource, use the file's duration as the end time of this
  // video frame.
  uint64_t next_tstamp = 0;
  {
    nsAutoRef<NesteggPacketHolder> next_holder(NextPacket(VIDEO));
    if (next_holder) {
      r = nestegg_packet_tstamp(next_holder->mPacket, &next_tstamp);
      if (r == -1) {
        return false;
      }
      PushVideoPacket(next_holder.disown());
    } else {
      ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
      int64_t endTime = mDecoder->GetEndMediaTime();
      if (endTime == -1) {
        return false;
      }
      next_tstamp = endTime * NS_PER_USEC;
    }
  }

  int64_t tstamp_usecs = tstamp / NS_PER_USEC;
  for (uint32_t i = 0; i < count; ++i) {
    unsigned char* data;
    size_t length;
    r = nestegg_packet_data(packet, i, &data, &length);
    if (r == -1) {
      return false;
    }

    vpx_codec_stream_info_t si;
    memset(&si, 0, sizeof(si));
    si.sz = sizeof(si);
    vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
    if (aKeyframeSkip && (!si.is_kf || tstamp_usecs < aTimeThreshold)) {
      // Skipping to next keyframe...
      parsed++; // Assume 1 frame per chunk.
      continue;
    }

    if (aKeyframeSkip && si.is_kf) {
      aKeyframeSkip = false;
    }

    if (vpx_codec_decode(&mVP8, data, length, NULL, 0)) {
      return false;
    }

    // If the timestamp of the video frame is less than
    // the time threshold required then it is not added
    // to the video queue and won't be displayed.
    if (tstamp_usecs < aTimeThreshold) {
      parsed++; // Assume 1 frame per chunk.
      continue;
    }

    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img;

    while ((img = vpx_codec_get_frame(&mVP8, &iter))) {
      NS_ASSERTION(img->fmt == IMG_FMT_I420, "WebM image format is not I420");

      // Chroma shifts are rounded down as per the decoding examples in the VP8 SDK
      VideoData::YCbCrBuffer b;
      b.mPlanes[0].mData = img->planes[0];
      b.mPlanes[0].mStride = img->stride[0];
      b.mPlanes[0].mHeight = img->d_h;
      b.mPlanes[0].mWidth = img->d_w;
      b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;

      b.mPlanes[1].mData = img->planes[1];
      b.mPlanes[1].mStride = img->stride[1];
      b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
      b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;

      b.mPlanes[2].mData = img->planes[2];
      b.mPlanes[2].mStride = img->stride[2];
      b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
      b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;

      nsIntRect picture = mPicture;
      if (img->d_w != static_cast<uint32_t>(mInitialFrame.width) ||
          img->d_h != static_cast<uint32_t>(mInitialFrame.height)) {
        // Frame size is different from what the container reports. This is legal
        // in WebM, and we will preserve the ratio of the crop rectangle as it
        // was reported relative to the picture size reported by the container.
        picture.x = (mPicture.x * img->d_w) / mInitialFrame.width;
        picture.y = (mPicture.y * img->d_h) / mInitialFrame.height;
        picture.width = (img->d_w * mPicture.width) / mInitialFrame.width;
        picture.height = (img->d_h * mPicture.height) / mInitialFrame.height;
      }

      VideoData *v = VideoData::Create(mInfo,
                                       mDecoder->GetImageContainer(),
                                       holder->mOffset,
                                       tstamp_usecs,
                                       next_tstamp / NS_PER_USEC,
                                       b,
                                       si.is_kf,
                                       -1,
                                       picture);
      if (!v) {
        return false;
      }
      parsed++;
      decoded++;
      NS_ASSERTION(decoded <= parsed,
        "Expect only 1 frame per chunk per packet in WebM...");
      VideoQueue().Push(v);
    }
  }

  return true;
}
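
// Returns a video packet to the front of the video packet queue (or, during a
// DASH stream switch, to the next reader's queue) so it can be decoded later.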
void
WebMReader::PushVideoPacket(NesteggPacketHolder* aItem)
{
#ifdef MOZ_DASH
  if (mPushVideoPacketToNextReader) {
    NS_ASSERTION(mNextReader,
                 "Stream switch has been requested but mNextReader is null");
    mNextReader->mVideoPackets.PushFront(aItem);
    mPushVideoPacketToNextReader = false;
  } else {
#endif
    mVideoPackets.PushFront(aItem);
#ifdef MOZ_DASH
  }
#endif
}
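
// Resets decode state, seeks the demuxer to aTarget on the active track, and
// then decodes forward to the exact target time.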
nsresult WebMReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime,
                          int64_t aCurrentTime)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  LOG(PR_LOG_DEBUG, ("Reader [%p] for Decoder [%p]: About to seek to %fs",
                     this, mDecoder, aTarget/1000000.0));
  if (NS_FAILED(ResetDecode())) {
    return NS_ERROR_FAILURE;
  }
  uint32_t trackToSeek = mHasVideo ? mVideoTrack : mAudioTrack;
  int r = nestegg_track_seek(mContext, trackToSeek, aTarget * NS_PER_USEC);
  if (r != 0) {
    return NS_ERROR_FAILURE;
  }
  return DecodeToTarget(aTarget);
}
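
// Computes the buffered time ranges by mapping the resource's cached byte
// ranges to timecodes via the WebMBufferedState parser.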
nsresult WebMReader::GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime)
{
  MediaResource* resource = mDecoder->GetResource();

  uint64_t timecodeScale;
  if (!mContext || nestegg_tstamp_scale(mContext, &timecodeScale) == -1) {
    return NS_OK;
  }

  // Special case completely cached files. This also handles local files.
  bool isFullyCached = resource->IsDataCachedToEndOfResource(0);
  if (isFullyCached) {
    uint64_t duration = 0;
    if (nestegg_duration(mContext, &duration) == 0) {
      aBuffered->Add(0, duration / NS_PER_S);
    }
  }

  uint32_t bufferedLength = 0;
  aBuffered->GetLength(&bufferedLength);

  // Either the file is not fully cached, or we couldn't find a duration in
  // the WebM bitstream.
  if (!isFullyCached || !bufferedLength) {
    MediaResource* resource = mDecoder->GetResource();
    nsTArray<MediaByteRange> ranges;
    nsresult res = resource->GetCachedRanges(ranges);
    NS_ENSURE_SUCCESS(res, res);

    for (uint32_t index = 0; index < ranges.Length(); index++) {
      uint64_t start, end;
      bool rv = mBufferedState->CalculateBufferedForRange(ranges[index].mStart,
                                                          ranges[index].mEnd,
                                                          &start, &end);
      if (rv) {
        double startTime = start * timecodeScale / NS_PER_S - aStartTime;
        double endTime = end * timecodeScale / NS_PER_S - aStartTime;

        // If this range extends to the end of the file, the true end time
        // is the file's duration.
        if (resource->IsDataCachedToEndOfResource(ranges[index].mStart)) {
          uint64_t duration = 0;
          if (nestegg_duration(mContext, &duration) == 0) {
            endTime = duration / NS_PER_S;
          }
        }

        aBuffered->Add(startTime, endTime);
      }
    }
  }

  return NS_OK;
}
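
// Feeds newly downloaded data to the buffered-range parser so that
// GetBuffered stays up to date as the download progresses.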
void WebMReader::NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset)
{
  mBufferedState->NotifyDataArrived(aBuffer, aLength, aOffset);
}

#ifdef MOZ_DASH
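// Maps a seek time to the index of the DASH subsegment (cluster) that
// contains it, or -1 if the time is outside the presentation.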
int64_t
WebMReader::GetSubsegmentForSeekTime(int64_t aSeekToTime)
{
  NS_ENSURE_TRUE(0 <= aSeekToTime, -1);
  // Check the first n-1 subsegments. End time is the start time of the next
  // subsegment.
  for (uint32_t i = 1; i < (mClusterByteRanges.Length()); i++) {
    if (aSeekToTime < mClusterByteRanges[i].mStartTime) {
      return i-1;
    }
  }
  // Check the last subsegment. End time is the end time of the file.
  NS_ASSERTION(mDecoder, "Decoder should not be null!");
  if (aSeekToTime <= mDecoder->GetMediaDuration()) {
    return mClusterByteRanges.Length()-1;
  }

  return (-1);
}

nsresult
WebMReader::GetSubsegmentByteRanges(nsTArray<MediaByteRange>& aByteRanges)
{
  NS_ENSURE_TRUE(mContext, NS_ERROR_NULL_POINTER);
  NS_ENSURE_TRUE(aByteRanges.IsEmpty(), NS_ERROR_ALREADY_INITIALIZED);
  NS_ENSURE_FALSE(mClusterByteRanges.IsEmpty(), NS_ERROR_NOT_INITIALIZED);
  NS_ENSURE_FALSE(mCuesByteRange.IsNull(), NS_ERROR_NOT_INITIALIZED);

  for (uint32_t i = 0; i < mClusterByteRanges.Length(); i++) {
    aByteRanges.AppendElement();
    aByteRanges[i] = mClusterByteRanges[i];
  }

  return NS_OK;
}

void
WebMReader::RequestSwitchAtSubsegment(int32_t aSubsegmentIdx,
                                      MediaDecoderReader* aNextReader)
{
  NS_ASSERTION(NS_IsMainThread() || mDecoder->OnDecodeThread(),
               "Should be on main thread or decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  // Only allow one switch at a time; ignore if one is already requested.
  if (mSwitchingCluster != -1) {
    return;
  }
  NS_ENSURE_TRUE_VOID((uint32_t)aSubsegmentIdx < mClusterByteRanges.Length());
  mSwitchingCluster = aSubsegmentIdx;
  NS_ENSURE_TRUE_VOID(aNextReader);
  NS_ENSURE_TRUE_VOID(aNextReader != this);
  mNextReader = static_cast<WebMReader*>(aNextReader);
}

void
WebMReader::RequestSeekToSubsegment(uint32_t aIdx)
{
  NS_ASSERTION(NS_IsMainThread() || mDecoder->OnDecodeThread(),
               "Should be on main thread or decode thread.");
  NS_ASSERTION(mDecoder, "decoder should not be null!");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  // Don't seek if we're about to switch to another reader.
  if (mSwitchingCluster != -1) {
    return;
  }
  // Only allow seeking if a request was not already made.
  if (mSeekToCluster != -1) {
    return;
  }
  NS_ENSURE_TRUE_VOID(aIdx < mClusterByteRanges.Length());
  mSeekToCluster = aIdx;

  // XXX Hack to get the resource to seek to the correct offset if the decode
  // thread is in shutdown, e.g. if the video is not autoplay.
  if (mDecoder->IsShutdown()) {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    mDecoder->GetResource()->Seek(PR_SEEK_SET,
                                  mClusterByteRanges[mSeekToCluster].mStart);
  }
}

void
WebMReader::PrepareToDecode()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  if (mSeekToCluster != -1) {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    SeekToCluster(mSeekToCluster);
  }
}

void
WebMReader::SeekToCluster(uint32_t aIdx)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  NS_ASSERTION(0 <= mSeekToCluster, "mSeekToCluster should be set.");
  NS_ENSURE_TRUE_VOID(aIdx < mClusterByteRanges.Length());
  LOG(PR_LOG_DEBUG, ("Reader [%p] for Decoder [%p]: seeking to "
                     "subsegment [%lld] at offset [%lld]",
                     this, mDecoder, aIdx, mClusterByteRanges[aIdx].mStart));
  int r = nestegg_offset_seek(mContext, mClusterByteRanges[aIdx].mStart);
  NS_ENSURE_TRUE_VOID(r == 0);
  mSeekToCluster = -1;
}

bool
WebMReader::HasReachedSubsegment(uint32_t aSubsegmentIndex)
{
  NS_ASSERTION(mDecoder, "Decoder is null.");
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  NS_ENSURE_TRUE(aSubsegmentIndex < mClusterByteRanges.Length(), false);

  NS_ASSERTION(mDecoder->GetResource(), "Decoder has no media resource.");
  if (mReachedSwitchAccessPoint) {
    LOG(PR_LOG_DEBUG,
        ("Reader [%p] for Decoder [%p]: reached switching offset [%lld] = "
         "mClusterByteRanges[%d].mStart[%lld]",
         this, mDecoder, mCurrentOffset, aSubsegmentIndex,
         mClusterByteRanges[aSubsegmentIndex].mStart));
    mReachedSwitchAccessPoint = false;
    return true;
  }
  return false;
}
#endif /* MOZ_DASH */

} // namespace mozilla