Bug 831645 - part2, Rtsp media resource and decoder. r=roc, doublec, sworkman, khuey, gps

This commit is contained in:
Benjamin Chen 2013-09-23 17:53:36 +08:00
parent 03c4ac4845
commit a9ca45668c
21 changed files with 1467 additions and 48 deletions

View File

@ -559,6 +559,7 @@ pref("dom.webapps.useCurrentProfile", true);
pref("dom.sysmsg.enabled", true);
pref("media.plugins.enabled", false);
pref("media.omx.enabled", true);
pref("media.rtsp.enabled", true);
// Disable printing (particularly, window.print())
pref("dom.disable_window_print", true);

View File

@ -15,6 +15,7 @@
#define MEDIASTREAMURI_SCHEME "mediastream"
#define MEDIASOURCEURI_SCHEME "mediasource"
#define FONTTABLEURI_SCHEME "moz-fonttable"
#define RTSPURI_SCHEME "rtsp"
class nsIDOMBlob;
class nsIDOMMediaStream;
@ -85,6 +86,12 @@ inline bool IsBlobURI(nsIURI* aUri)
return NS_SUCCEEDED(aUri->SchemeIs(BLOBURI_SCHEME, &isBlob)) && isBlob;
}
// Returns true when aUri uses the "rtsp" scheme (RTSPURI_SCHEME).
// Follows the same pattern as IsBlobURI() above.
inline bool IsRtspURI(nsIURI* aUri)
{
// Initialize defensively: if SchemeIs() fails it may leave the out-param
// untouched, so do not rely solely on the && short-circuit below.
bool isRtsp = false;
return NS_SUCCEEDED(aUri->SchemeIs(RTSPURI_SCHEME, &isRtsp)) && isRtsp;
}
inline bool IsMediaStreamURI(nsIURI* aUri)
{
bool isStream;

View File

@ -536,6 +536,13 @@ public:
}
}
/**
* A public wrapper for FinishDecoderSetup()
*/
nsresult FinishDecoderSetup(MediaDecoder* aDecoder, MediaResource* aStream) {
// Forwards to the protected four-argument overload with a null stream
// listener and no source element.
return FinishDecoderSetup(aDecoder, aStream, nullptr, nullptr);
}
protected:
class MediaLoadListener;
class StreamListener;

View File

@ -2505,7 +2505,19 @@ nsresult HTMLMediaElement::InitializeDecoderForChannel(nsIChannel* aChannel,
// stream successfully created, the stream now owns the channel.
mChannel = nullptr;
return FinishDecoderSetup(decoder, resource, aListener, nullptr);
// We postpone the |FinishDecoderSetup| function call until we get
// |OnConnected| signal from MediaStreamController which is held by
// RtspMediaResource.
if (DecoderTraits::DecoderWaitsForOnConnected(mimeType)) {
decoder->SetResource(resource);
mDecoder = decoder;
if (aListener) {
*aListener = nullptr;
}
return NS_OK;
} else {
return FinishDecoderSetup(decoder, resource, aListener, nullptr);
}
}
nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,

View File

@ -45,6 +45,10 @@
#include "nsIPrincipal.h"
#include "mozilla/dom/HTMLMediaElement.h"
#endif
#ifdef MOZ_RTSP
#include "RtspOmxDecoder.h"
#include "RtspOmxReader.h"
#endif
#ifdef MOZ_DASH
#include "DASHDecoder.h"
#endif
@ -243,6 +247,29 @@ static char const *const gMpegAudioCodecs[2] = {
};
#endif
#ifdef MOZ_RTSP
static const char* const gRtspTypes[2] = {
"RTSP",
nullptr
};
// Returns true when RTSP playback is enabled (pref + OMX) and aMimeType is
// one of the recognized RTSP types listed in gRtspTypes.
static bool
IsRtspSupportedType(const nsACString& aMimeType)
{
  if (!MediaDecoder::IsRtspEnabled()) {
    return false;
  }
  return CodecListContains(gRtspTypes, aMimeType);
}
#endif
/* static */
// Returns true if the decoder setup must be deferred until the resource's
// OnConnected callback fires (currently only RTSP types).
bool DecoderTraits::DecoderWaitsForOnConnected(const nsACString& aMimeType) {
#ifdef MOZ_RTSP
// NOTE(review): unlike IsRtspSupportedType(), this does not check
// MediaDecoder::IsRtspEnabled() — confirm callers cannot reach this with
// RTSP disabled, or the element could wait for an OnConnected signal that
// never arrives.
return CodecListContains(gRtspTypes, aMimeType);
#else
return false;
#endif
}
#ifdef MOZ_MEDIA_PLUGINS
static bool
IsMediaPluginsType(const nsACString& aType)
@ -487,6 +514,11 @@ DecoderTraits::CreateDecoder(const nsACString& aType, MediaDecoderOwner* aOwner)
decoder = new MediaOmxDecoder();
}
#endif
#ifdef MOZ_RTSP
if (IsRtspSupportedType(aType)) {
decoder = new RtspOmxDecoder();
}
#endif
#ifdef MOZ_MEDIA_PLUGINS
if (MediaDecoder::IsMediaPluginsEnabled() && GetMediaPluginHost()->FindDecoder(aType, NULL)) {
decoder = new MediaPluginDecoder(aType);

View File

@ -57,6 +57,10 @@ public:
// or false otherwise. Not all platforms support all MIME types, and
// vice versa.
static bool IsSupportedInVideoDocument(const nsACString& aType);
// Returns true if we should not start decoder until we receive
// OnConnected signal. (currently RTSP only)
static bool DecoderWaitsForOnConnected(const nsACString& aType);
};
}

View File

@ -1172,20 +1172,8 @@ void MediaDecoder::ChangeState(PlayState aState)
}
}
mPlayState = aState;
if (mDecoderStateMachine) {
switch (aState) {
case PLAY_STATE_PLAYING:
mDecoderStateMachine->Play();
break;
case PLAY_STATE_SEEKING:
mDecoderStateMachine->Seek(mRequestedSeekTime);
mRequestedSeekTime = -1.0;
break;
default:
/* No action needed */
break;
}
}
ApplyStateToStateMachine(mPlayState);
if (aState!= PLAY_STATE_LOADING) {
mIsDormant = false;
@ -1195,6 +1183,27 @@ void MediaDecoder::ChangeState(PlayState aState)
GetReentrantMonitor().NotifyAll();
}
// Pushes the decoder's play state down to the state machine.
// Main thread only; the decoder's reentrant monitor must already be held.
void MediaDecoder::ApplyStateToStateMachine(PlayState aState)
{
  MOZ_ASSERT(NS_IsMainThread());
  GetReentrantMonitor().AssertCurrentThreadIn();
  if (!mDecoderStateMachine) {
    return;
  }
  if (aState == PLAY_STATE_PLAYING) {
    mDecoderStateMachine->Play();
  } else if (aState == PLAY_STATE_SEEKING) {
    mDecoderStateMachine->Seek(mRequestedSeekTime);
    // The seek request has been handed off; clear it.
    mRequestedSeekTime = -1.0;
  }
  // All other states need no state machine action.
}
void MediaDecoder::PlaybackPositionChanged()
{
MOZ_ASSERT(NS_IsMainThread());
@ -1697,6 +1706,15 @@ MediaDecoder::IsWebMEnabled()
}
#endif
#ifdef MOZ_RTSP
bool
MediaDecoder::IsRtspEnabled()
{
  // RTSP is currently decoded through the OMX path, so it is only enabled
  // when both the "media.rtsp.enabled" pref and OMX support are on.
  bool prefEnabled = Preferences::GetBool("media.rtsp.enabled", false);
  return prefEnabled && IsOmxEnabled();
}
#endif
#ifdef MOZ_GSTREAMER
bool
MediaDecoder::IsGStreamerEnabled()

View File

@ -683,6 +683,10 @@ public:
// change. Call on the main thread only.
void ChangeState(PlayState aState);
// Called by |ChangeState|, to update the state machine.
// Call on the main thread only and the lock must be obtained.
virtual void ApplyStateToStateMachine(PlayState aState);
// May be called by the reader to notify this decoder that the metadata from
// the media file has been read. Call on the decode thread only.
void OnReadMetadataCompleted() MOZ_OVERRIDE { }
@ -767,6 +771,9 @@ public:
#ifdef MOZ_WEBM
static bool IsWebMEnabled();
#endif
#ifdef MOZ_RTSP
static bool IsRtspEnabled();
#endif
#ifdef MOZ_GSTREAMER
static bool IsGStreamerEnabled();

View File

@ -7,6 +7,7 @@
#include "mozilla/DebugOnly.h"
#include "MediaResource.h"
#include "RtspMediaResource.h"
#include "mozilla/Mutex.h"
#include "nsDebug.h"
@ -1695,6 +1696,8 @@ MediaResource::Create(MediaDecoder* aDecoder, nsIChannel* aChannel)
nsRefPtr<MediaResource> resource;
if (fc || IsBlobURI(uri)) {
resource = new FileMediaResource(aDecoder, aChannel, uri, contentType);
} else if (IsRtspURI(uri)) {
resource = new RtspMediaResource(aDecoder, aChannel, uri, contentType);
} else {
resource = new ChannelMediaResource(aDecoder, aChannel, uri, contentType);
}

View File

@ -12,6 +12,7 @@
#endif
#include "nsIChannel.h"
#include "nsIURI.h"
#include "nsIStreamingProtocolController.h"
#include "nsIStreamListener.h"
#include "nsIChannelEventSink.h"
#include "nsIInterfaceRequestor.h"
@ -182,6 +183,7 @@ inline MediaByteRange::MediaByteRange(TimestampedMediaByteRange& aByteRange)
NS_ASSERTION(mStart < mEnd, "Range should end after start!");
}
class RtspMediaResource;
/**
* Provides a thread-safe, seek/read interface to resources
@ -391,6 +393,18 @@ public:
// nsIChannel when the MediaResource is created. Safe to call from
// any thread.
virtual const nsCString& GetContentType() const = 0;
// Get the RtspMediaResource pointer if this MediaResource really is a
// RtspMediaResource. For calling Rtsp specific functions.
virtual RtspMediaResource* GetRtspPointer() {
return nullptr;
}
// Return true if the stream is a live stream
virtual bool IsRealTime() {
return false;
}
protected:
virtual ~MediaResource() {};
};

View File

@ -0,0 +1,599 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/DebugOnly.h"
#include "RtspMediaResource.h"
#include "MediaDecoder.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/Monitor.h"
#include "mozilla/Preferences.h"
#include "nsIScriptSecurityManager.h"
#include "nsIStreamingProtocolService.h"
#include "nsServiceManagerUtils.h"
#ifdef PR_LOGGING
PRLogModuleInfo* gRtspMediaResourceLog;
#define LOG(msg, ...) PR_LOG(gRtspMediaResourceLog, PR_LOG_DEBUG, \
(msg, ##__VA_ARGS__))
// Debug logging macro with object pointer and class name.
#define RTSPMLOG(msg, ...) \
LOG("%p [RtspMediaResource]: " msg, this, ##__VA_ARGS__)
#else
#define LOG(msg, ...)
#define RTSPMLOG(msg, ...)
#endif
namespace mozilla {
/* class RtspTrackBuffer: a ring-buffer implementation for un-decoded
 * audio/video track data.
 * The ring buffer is divided into BUFFER_SLOT_NUM slots,
 * and each slot has a fixed size (mSlotSize).
 * Even though the ring buffer is divided into fixed-size slots, it can
 * still store data whose size is larger than one slot.
 * */
#define BUFFER_SLOT_NUM 8192
#define BUFFER_SLOT_DEFAULT_SIZE 256
#define BUFFER_SLOT_MAX_SIZE 8192
#define BUFFER_SLOT_INVALID -1
#define BUFFER_SLOT_EMPTY 0
struct BufferSlotData {
int32_t mLength;
uint64_t mTime;
};
class RtspTrackBuffer
{
public:
// aMonitor: name for the internal Monitor (diagnostics only).
// aTrackIdx: track number, retained only for logging.
// aSlotSize: fixed byte size of each of the BUFFER_SLOT_NUM slots.
RtspTrackBuffer(const char *aMonitor, int32_t aTrackIdx, uint32_t aSlotSize)
: mMonitor(aMonitor)
, mSlotSize(aSlotSize)
, mTotalBufferSize(BUFFER_SLOT_NUM * mSlotSize)
, mFrameType(0)
, mIsStarted(false) {
MOZ_COUNT_CTOR(RtspTrackBuffer);
#ifdef PR_LOGGING
mTrackIdx = aTrackIdx;
#endif
// Guards against uint32_t overflow in the mTotalBufferSize product above.
// NOTE(review): the product is computed in the init list before this
// assert runs, so a violating aSlotSize has already wrapped — confirm
// callers clamp aSlotSize (OnConnected clamps to BUFFER_SLOT_MAX_SIZE).
MOZ_ASSERT(mSlotSize < UINT32_MAX / BUFFER_SLOT_NUM);
mRingBuffer = new uint8_t[mTotalBufferSize];
Reset();
};
~RtspTrackBuffer() {
MOZ_COUNT_DTOR(RtspTrackBuffer);
// nsAutoArrayPtr frees the ring buffer when nulled.
mRingBuffer = nullptr;
};
// Allow reads/writes to proceed. Called once the track is set up.
void Start() {
MonitorAutoLock monitor(mMonitor);
mIsStarted = true;
}
// Stop the buffer; a ReadBuffer() blocked with no data returns failure.
void Stop() {
MonitorAutoLock monitor(mMonitor);
mIsStarted = false;
}
// Read the data from mRingBuffer[mConsumerIdx*mSlotSize] into aToBuffer.
// If the aToBufferSize is smaller than mBufferSlotDataLength[mConsumerIdx],
// early return and set the aFrameSize to notify the reader the aToBuffer
// doesn't have enough space. The reader must realloc the aToBuffer if it
// wishes to read the data.
nsresult ReadBuffer(uint8_t* aToBuffer, uint32_t aToBufferSize,
uint32_t& aReadCount, uint64_t& aFrameTime,
uint32_t& aFrameSize);
// Write the data from aFromBuffer into mRingBuffer[mProducerIdx*mSlotSize].
void WriteBuffer(const char *aFromBuffer, uint32_t aWriteCount,
uint64_t aFrameTime, uint32_t aFrameType);
// Reset the mProducerIdx, mConsumerIdx, mBufferSlotDataLength[],
// mBufferSlotDataTime[].
void Reset();
// We should call SetFrameType first then reset().
// If we call reset() first, the queue may still have some "garbage" frames
// written by another thread's |OnMediaDataAvailable| before |SetFrameType|.
void ResetWithFrameType(uint32_t aFrameType) {
SetFrameType(aFrameType);
Reset();
}
private:
// The FrameType values are kept in sync with nsIStreamingProtocolController.h.
// OR-ed into mFrameType; cleared again when a DISCONTINUITY frame arrives
// (see WriteBuffer).
void SetFrameType(uint32_t aFrameType) {
MonitorAutoLock monitor(mMonitor);
mFrameType = mFrameType | aFrameType;
}
// A monitor lock protecting all mutable state below and used to block the
// reader until data arrives.
Monitor mMonitor;
#ifdef PR_LOGGING
// Indicates the track number, for logging only.
int32_t mTrackIdx;
#endif
// mProducerIdx: next slot written by nsIStreamingProtocolController data.
// mConsumerIdx: next slot read by the decoder (OMX decoder side).
int32_t mProducerIdx;
int32_t mConsumerIdx;
// Because each slot's size is fixed, we need an array to record the real
// data length and data time stamp.
// The value in mBufferSlotData[index].mLength represents:
// -1(BUFFER_SLOT_INVALID): The index of slot data is invalid, mConsumerIdx
// should go forward.
// 0(BUFFER_SLOT_EMPTY): The index slot is empty. mConsumerIdx should wait here.
// positive value: The index slot contains valid data and the value is data size.
BufferSlotData mBufferSlotData[BUFFER_SLOT_NUM];
// The ring buffer storage (BUFFER_SLOT_NUM * mSlotSize bytes).
nsAutoArrayPtr<uint8_t> mRingBuffer;
// Each slot's size in bytes.
uint32_t mSlotSize;
// mRingBuffer's total size in bytes.
uint32_t mTotalBufferSize;
// A flag indicating whether incoming data should be dropped or stored.
// When we are seeking, the incoming data is dropped.
// Bit definitions are in |nsIStreamingProtocolController.h|.
uint32_t mFrameType;
// Set true/false when |Start()/Stop()| is called.
bool mIsStarted;
};
// Blocking read of one frame from the ring buffer (decode thread).
// Returns NS_OK after copying a frame (or after detecting aToBuffer is too
// small — then only aFrameSize is set), NS_ERROR_FAILURE once Stop() has run
// and no data remains.
nsresult RtspTrackBuffer::ReadBuffer(uint8_t* aToBuffer, uint32_t aToBufferSize,
uint32_t& aReadCount, uint64_t& aFrameTime,
uint32_t& aFrameSize)
{
MonitorAutoLock monitor(mMonitor);
RTSPMLOG("ReadBuffer mTrackIdx %d mProducerIdx %d mConsumerIdx %d "
"mBufferSlotData[mConsumerIdx].mLength %d"
,mTrackIdx ,mProducerIdx ,mConsumerIdx
,mBufferSlotData[mConsumerIdx].mLength);
// Reader should skip the slots with mLength==BUFFER_SLOT_INVALID.
// The loop ends when
// 1. Read data successfully
// 2. Fail to read data due to aToBuffer's space
// 3. No data in this buffer
// 4. mIsStarted is not set
while (1) {
if (mBufferSlotData[mConsumerIdx].mLength > 0) {
// Check the aToBuffer space is enough for data copy.
if ((int32_t)aToBufferSize < mBufferSlotData[mConsumerIdx].mLength) {
aFrameSize = mBufferSlotData[mConsumerIdx].mLength;
break;
}
// Number of slots this frame occupies; must match WriteBuffer's formula.
// NOTE(review): when mLength is an exact multiple of mSlotSize this
// counts one slot too many; if the frame ends exactly at the last ring
// slot the clearing loop below would touch
// mBufferSlotData[BUFFER_SLOT_NUM] — confirm this cannot happen.
uint32_t slots = (mBufferSlotData[mConsumerIdx].mLength / mSlotSize) + 1;
// we have data, copy to aToBuffer
MOZ_ASSERT(mBufferSlotData[mConsumerIdx].mLength <=
(int32_t)((BUFFER_SLOT_NUM - mConsumerIdx) * mSlotSize));
memcpy(aToBuffer,
(void *)(&mRingBuffer[mSlotSize * mConsumerIdx]),
mBufferSlotData[mConsumerIdx].mLength);
aFrameSize = aReadCount = mBufferSlotData[mConsumerIdx].mLength;
aFrameTime = mBufferSlotData[mConsumerIdx].mTime;
RTSPMLOG("DataLength %d, data time %lld"
,mBufferSlotData[mConsumerIdx].mLength
,mBufferSlotData[mConsumerIdx].mTime);
// After reading the data, we set current index of mBufferSlotDataLength
// to BUFFER_SLOT_EMPTY to indicate these slots are free.
for (uint32_t i = mConsumerIdx; i < mConsumerIdx + slots; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_EMPTY;
mBufferSlotData[i].mTime = BUFFER_SLOT_EMPTY;
}
mConsumerIdx = (mConsumerIdx + slots) % BUFFER_SLOT_NUM;
break;
} else if (mBufferSlotData[mConsumerIdx].mLength == BUFFER_SLOT_INVALID) {
// Tail of a multi-slot frame that wrapped; skip forward one slot.
mConsumerIdx = (mConsumerIdx + 1) % BUFFER_SLOT_NUM;
RTSPMLOG("BUFFER_SLOT_INVALID move forward");
} else {
// No data, and disconnected.
if (!mIsStarted) {
return NS_ERROR_FAILURE;
}
// No data, the decode thread is blocked here until we receive
// OnMediaDataAvailable. The OnMediaDataAvailable will call WriteBuffer()
// to wake up the decode thread.
RTSPMLOG("monitor.Wait()");
monitor.Wait();
}
}
return NS_OK;
}
/* When we perform a WriteBuffer, we check mIsStarted and aFrameType first.
* These flags prevent "garbage" frames from being written into the buffer.
*
* After writing the data into the buffer, we check to see if we wrote over a
* slot, and update mConsumerIdx if necessary.
* This ensures that the decoder will get the "oldest" data available in the
* buffer.
*
* If the incoming data is larger than one slot size (isMultipleSlots), we do
* |mBufferSlotData[].mLength = BUFFER_SLOT_INVALID;| for other slots except the
* first slot, in order to notify the reader that some slots are unavailable.
*
* If the incoming data is isMultipleSlots and crosses the end of
* BUFFER_SLOT_NUM, returnToHead is set to true and the data will continue to
* be written from head(index 0).
*
* MEDIASTREAM_FRAMETYPE_DISCONTINUITY currently is used when we are seeking.
* */
// Write one frame into the ring buffer and wake any blocked reader.
// See the block comment above for the overwrite / wrap-around semantics.
void RtspTrackBuffer::WriteBuffer(const char *aFromBuffer, uint32_t aWriteCount,
uint64_t aFrameTime, uint32_t aFrameType)
{
MonitorAutoLock monitor(mMonitor);
if (!mIsStarted) {
RTSPMLOG("mIsStarted is false");
return;
}
if (mTotalBufferSize < aWriteCount) {
RTSPMLOG("mTotalBufferSize < aWriteCount, incoming data is too large");
return;
}
// Checking the incoming data's frame type.
// If we receive MEDIASTREAM_FRAMETYPE_DISCONTINUITY, clearing the flag in
// mFrameType marks the RtspTrackBuffer as ready to receive data again.
if (aFrameType & MEDIASTREAM_FRAMETYPE_DISCONTINUITY) {
mFrameType = mFrameType & (~MEDIASTREAM_FRAMETYPE_DISCONTINUITY);
RTSPMLOG("Clear mFrameType");
return;
}
// Checking current buffer frame type.
// If the MEDIASTREAM_FRAMETYPE_DISCONTINUITY bit is set, the
// RtspTrackBuffer can't receive data now (e.g. mid-seek). Drop the frame
// until a MEDIASTREAM_FRAMETYPE_DISCONTINUITY frame arrives.
if (mFrameType & MEDIASTREAM_FRAMETYPE_DISCONTINUITY) {
RTSPMLOG("Return because the mFrameType is set");
return;
}
// The flag is true if the incoming data is larger than one slot size.
bool isMultipleSlots = false;
// The flag is true if the incoming data is larger than remainder free slots
bool returnToHead = false;
// Calculate how many slots the incoming data needed.
int32_t slots = 1;
int32_t i;
RTSPMLOG("WriteBuffer mTrackIdx %d mProducerIdx %d mConsumerIdx %d",
mTrackIdx, mProducerIdx,mConsumerIdx);
if (aWriteCount > mSlotSize) {
isMultipleSlots = true;
// NOTE(review): when aWriteCount is an exact multiple of mSlotSize this
// allocates one extra slot; combined with the returnToHead test below it
// looks like the INVALID-marking loops can reach index BUFFER_SLOT_NUM —
// confirm against real frame sizes.
slots = (aWriteCount / mSlotSize) + 1;
}
if (isMultipleSlots &&
(aWriteCount > (BUFFER_SLOT_NUM - mProducerIdx) * mSlotSize)) {
returnToHead = true;
}
RTSPMLOG("slots %d isMultipleSlots %d returnToHead %d",
slots, isMultipleSlots, returnToHead);
if (returnToHead) {
// Clear the rest index of mBufferSlotData[].mLength
for (i = mProducerIdx; i < BUFFER_SLOT_NUM; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_INVALID;
}
// We wrote one or more slots that the decode thread has not yet read.
// So the mConsumerIdx returns to the head of slot buffer and moves forward
// to the oldest slot.
if (mProducerIdx <= mConsumerIdx && mConsumerIdx < mProducerIdx + slots) {
mConsumerIdx = 0;
for (i = mConsumerIdx; i < BUFFER_SLOT_NUM; ++i) {
if (mBufferSlotData[i].mLength > 0) {
mConsumerIdx = i;
break;
}
}
}
mProducerIdx = 0;
}
memcpy(&(mRingBuffer[mSlotSize * mProducerIdx]), aFromBuffer, aWriteCount);
if (mProducerIdx <= mConsumerIdx && mConsumerIdx < mProducerIdx + slots
&& mBufferSlotData[mConsumerIdx].mLength > 0) {
// Wrote one or more slots that the decode thread has not yet read.
RTSPMLOG("overwrite!! %d time %lld"
,mTrackIdx,mBufferSlotData[mConsumerIdx].mTime);
mBufferSlotData[mProducerIdx].mLength = aWriteCount;
mBufferSlotData[mProducerIdx].mTime = aFrameTime;
// Clear the mBufferSlotDataLength except the start slot.
if (isMultipleSlots) {
for (i = mProducerIdx + 1; i < mProducerIdx + slots; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_INVALID;
}
}
mProducerIdx = (mProducerIdx + slots) % BUFFER_SLOT_NUM;
// Move the mConsumerIdx forward to ensure that the decoder reads the
// oldest data available.
mConsumerIdx = mProducerIdx;
} else {
// Normal case, the writer doesn't take over the reader.
mBufferSlotData[mProducerIdx].mLength = aWriteCount;
mBufferSlotData[mProducerIdx].mTime = aFrameTime;
// Clear the mBufferSlotData[].mLength except the start slot.
if (isMultipleSlots) {
for (i = mProducerIdx + 1; i < mProducerIdx + slots; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_INVALID;
}
}
mProducerIdx = (mProducerIdx + slots) % BUFFER_SLOT_NUM;
}
// Wake a decode thread blocked in ReadBuffer().
mMonitor.NotifyAll();
}
void RtspTrackBuffer::Reset() {
MonitorAutoLock monitor(mMonitor);
mProducerIdx = 0;
mConsumerIdx = 0;
for (uint32_t i = 0; i < BUFFER_SLOT_NUM; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_EMPTY;
mBufferSlotData[i].mTime = BUFFER_SLOT_EMPTY;
}
mMonitor.NotifyAll();
}
// Creates the streaming protocol controller for aChannel and starts the
// asynchronous connection; OnConnected fires later via mListener.
RtspMediaResource::RtspMediaResource(MediaDecoder* aDecoder,
nsIChannel* aChannel, nsIURI* aURI, const nsACString& aContentType)
: BaseMediaResource(aDecoder, aChannel, aURI, aContentType)
, mIsConnected(false)
, mRealTime(false)
{
nsCOMPtr<nsIStreamingProtocolControllerService> mediaControllerService =
do_GetService(MEDIASTREAMCONTROLLERSERVICE_CONTRACTID);
MOZ_ASSERT(mediaControllerService);
if (mediaControllerService) {
mediaControllerService->Create(mChannel,
getter_AddRefs(mMediaStreamController));
MOZ_ASSERT(mMediaStreamController);
// mListener keeps a strong ref back to us until Revoke() in the dtor.
mListener = new Listener(this);
mMediaStreamController->AsyncOpen(mListener);
}
#ifdef PR_LOGGING
if (!gRtspMediaResourceLog) {
gRtspMediaResourceLog = PR_NewLogModule("RtspMediaResource");
}
#endif
}
RtspMediaResource::~RtspMediaResource()
{
RTSPMLOG("~RtspMediaResource");
if (mListener) {
// Kill its reference to us since we're going away; subsequent controller
// callbacks into the Listener become no-ops.
mListener->Revoke();
}
}
NS_IMPL_ISUPPORTS2(RtspMediaResource::Listener,
nsIInterfaceRequestor, nsIStreamingProtocolListener);
// Forward incoming track data to the owning resource, unless Revoke() has
// already severed the link.
nsresult
RtspMediaResource::Listener::OnMediaDataAvailable(uint8_t aTrackIdx,
                                                  const nsACString &data,
                                                  uint32_t length,
                                                  uint32_t offset,
                                                  nsIStreamingProtocolMetaData *meta)
{
  if (mResource) {
    return mResource->OnMediaDataAvailable(aTrackIdx, data, length, offset,
                                           meta);
  }
  return NS_OK;
}
// Forward the connection notification to the owning resource, unless
// Revoke() has already severed the link.
nsresult
RtspMediaResource::Listener::OnConnected(uint8_t aTrackIdx,
                                         nsIStreamingProtocolMetaData *meta)
{
  if (mResource) {
    return mResource->OnConnected(aTrackIdx, meta);
  }
  return NS_OK;
}
// Forward the disconnection notification to the owning resource, unless
// Revoke() has already severed the link.
nsresult
RtspMediaResource::Listener::OnDisconnected(uint8_t aTrackIdx, uint32_t reason)
{
  if (mResource) {
    return mResource->OnDisconnected(aTrackIdx, reason);
  }
  return NS_OK;
}
nsresult
RtspMediaResource::Listener::GetInterface(const nsIID & aIID, void **aResult)
{
// nsIInterfaceRequestor: only hand out interfaces this object implements.
return QueryInterface(aIID, aResult);
}
// Decode-thread entry point: read one frame for track aTrackIdx.
// Blocks inside RtspTrackBuffer::ReadBuffer() until data arrives or the
// buffer is stopped. See the header for the out-parameter semantics.
nsresult
RtspMediaResource::ReadFrameFromTrack(uint8_t* aBuffer, uint32_t aBufferSize,
uint32_t aTrackIdx, uint32_t& aBytes,
uint64_t& aTime, uint32_t& aFrameSize)
{
NS_ASSERTION(!NS_IsMainThread(), "Don't call on main thread");
NS_ASSERTION(aTrackIdx < mTrackBuffer.Length(),
"ReadTrack index > mTrackBuffer");
MOZ_ASSERT(aBuffer);
return mTrackBuffer[aTrackIdx]->ReadBuffer(aBuffer, aBufferSize, aBytes,
aTime, aFrameSize);
}
// Controller callback: append one incoming frame to the track's ring buffer.
// NOTE(review): aTrackIdx is used to index mTrackBuffer without a bounds
// check (only a debug assertion exists on the read path) — confirm the
// controller guarantees a valid track index.
nsresult
RtspMediaResource::OnMediaDataAvailable(uint8_t aTrackIdx,
const nsACString &data,
uint32_t length,
uint32_t offset,
nsIStreamingProtocolMetaData *meta)
{
uint64_t time;
uint32_t frameType;
meta->GetTimeStamp(&time);
meta->GetFrameType(&frameType);
// Live streams carry no meaningful timestamps; normalize them to zero.
if (mRealTime) {
time = 0;
}
mTrackBuffer[aTrackIdx]->WriteBuffer(data.BeginReading(), length, time,
frameType);
return NS_OK;
}
// Controller callback fired once the RTSP session is established.
// Creates one RtspTrackBuffer per track, configures the decoder for
// seekable vs. live playback, then runs the deferred FinishDecoderSetup()
// (see DecoderTraits::DecoderWaitsForOnConnected).
nsresult
RtspMediaResource::OnConnected(uint8_t aTrackIdx,
nsIStreamingProtocolMetaData *meta)
{
// Guard against duplicate OnConnected notifications.
if (mIsConnected) {
return NS_OK;
}
uint8_t tracks;
mMediaStreamController->GetTotalTracks(&tracks);
// NOTE(review): duration ends up holding the *last* track's duration after
// the loop — confirm all tracks report the same duration.
uint64_t duration = 0;
for (int i = 0; i < tracks; ++i) {
nsCString rtspTrackId("RtspTrack");
rtspTrackId.AppendInt(i);
nsCOMPtr<nsIStreamingProtocolMetaData> trackMeta;
mMediaStreamController->GetTrackMetaData(i, getter_AddRefs(trackMeta));
MOZ_ASSERT(trackMeta);
trackMeta->GetDuration(&duration);
// Here is a heuristic to estimate the slot size.
// For video track, calculate the width*height.
// For audio track, use the BUFFER_SLOT_DEFAULT_SIZE because the w*h is 0.
// Finally clamp them into (BUFFER_SLOT_DEFAULT_SIZE,BUFFER_SLOT_MAX_SIZE)
uint32_t w, h;
uint32_t slotSize;
trackMeta->GetWidth(&w);
trackMeta->GetHeight(&h);
slotSize = clamped((int32_t)(w * h), BUFFER_SLOT_DEFAULT_SIZE,
BUFFER_SLOT_MAX_SIZE);
mTrackBuffer.AppendElement(new RtspTrackBuffer(rtspTrackId.get(),
i, slotSize));
mTrackBuffer[i]->Start();
}
// If the duration is 0, imply the stream is live stream.
if (duration) {
// Not live stream.
mRealTime = false;
bool seekable = true;
mDecoder->SetInfinite(false);
mDecoder->SetTransportSeekable(seekable);
mDecoder->SetDuration(duration);
} else {
// Live stream.
// Check the preference "media.realtime_decoder.enabled".
if (!Preferences::GetBool("media.realtime_decoder.enabled", false)) {
// Give up, report error to media element.
nsCOMPtr<nsIRunnable> event =
NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
return NS_ERROR_FAILURE;
} else {
mRealTime = true;
bool seekable = false;
mDecoder->SetInfinite(true);
mDecoder->SetTransportSeekable(seekable);
mDecoder->SetMediaSeekable(seekable);
}
}
// Fires an initial progress event and sets up the stall counter so stall events
// fire if no download occurs within the required time frame.
mDecoder->Progress(false);
MediaDecoderOwner* owner = mDecoder->GetMediaOwner();
NS_ENSURE_TRUE(owner, NS_ERROR_FAILURE);
HTMLMediaElement* element = owner->GetMediaElement();
NS_ENSURE_TRUE(element, NS_ERROR_FAILURE);
// The deferred decoder setup (skipped in InitializeDecoderForChannel).
element->FinishDecoderSetup(mDecoder, this);
mIsConnected = true;
return NS_OK;
}
// Controller callback: the RTSP session ended. Stop and clear every track
// buffer (unblocking any waiting reader) and report refused connections.
nsresult
RtspMediaResource::OnDisconnected(uint8_t aTrackIdx, uint32_t aReason)
{
NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
for (uint32_t i = 0 ; i < mTrackBuffer.Length(); ++i) {
mTrackBuffer[i]->Stop();
mTrackBuffer[i]->Reset();
}
// aReason carries an nsresult value from the controller.
if (aReason == (uint32_t)NS_ERROR_CONNECTION_REFUSED) {
mDecoder->NetworkError();
}
return NS_OK;
}
// Main thread only. Pauses the RTSP transport and notifies the media
// element that the download is suspended. aCloseImmediately is unused here.
void RtspMediaResource::Suspend(bool aCloseImmediately)
{
NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
MediaDecoderOwner* owner = mDecoder->GetMediaOwner();
NS_ENSURE_TRUE_VOID(owner);
HTMLMediaElement* element = owner->GetMediaElement();
NS_ENSURE_TRUE_VOID(element);
mMediaStreamController->Suspend();
element->DownloadSuspended();
}
// Main thread only. Resumes the RTSP transport and, if the channel still
// exists, notifies the media element that the download resumed.
void RtspMediaResource::Resume()
{
NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
MediaDecoderOwner* owner = mDecoder->GetMediaOwner();
NS_ENSURE_TRUE_VOID(owner);
HTMLMediaElement* element = owner->GetMediaElement();
NS_ENSURE_TRUE_VOID(element);
if (mChannel) {
element->DownloadResumed();
}
mMediaStreamController->Resume();
}
nsresult RtspMediaResource::Open(nsIStreamListener **aStreamListener)
{
// No-op: the connection was already started asynchronously in the
// constructor via mMediaStreamController->AsyncOpen(); aStreamListener is
// intentionally left untouched.
return NS_OK;
}
// Main thread only. Tears down the RTSP session via the controller.
nsresult RtspMediaResource::Close()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
mMediaStreamController->Stop();
return NS_OK;
}
// Main thread only. Returns the principal of the underlying channel, or
// nullptr when there is no channel or no security manager.
already_AddRefed<nsIPrincipal> RtspMediaResource::GetCurrentPrincipal()
{
  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
  nsIScriptSecurityManager* secMan = nsContentUtils::GetSecurityManager();
  if (!mChannel || !secMan) {
    return nullptr;
  }
  nsCOMPtr<nsIPrincipal> principal;
  secMan->GetChannelPrincipal(mChannel, getter_AddRefs(principal));
  return principal.forget();
}
// Seek the RTSP stream to aOffset (a time value, not a byte offset — RTSP
// seeks by time, unlike the byte-based MediaResource::Seek). Clears every
// track buffer and raises the discontinuity flag so stale frames written
// before the server completes the seek are dropped.
nsresult RtspMediaResource::SeekTime(int64_t aOffset)
{
NS_ASSERTION(!NS_IsMainThread(), "Don't call on main thread");
RTSPMLOG("Seek requested for aOffset [%lld] for decoder [%p]",
aOffset, mDecoder);
// Clear buffer and raise the frametype flag.
for(uint32_t i = 0 ; i < mTrackBuffer.Length(); ++i) {
mTrackBuffer[i]->ResetWithFrameType(MEDIASTREAM_FRAMETYPE_DISCONTINUITY);
}
return mMediaStreamController->Seek(aOffset);
}
} // namespace mozilla

View File

@ -0,0 +1,233 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(RtspMediaResource_h_)
#define RtspMediaResource_h_
#include "MediaResource.h"
namespace mozilla {
class RtspTrackBuffer;
/* RtspMediaResource
* RtspMediaResource provides an interface to deliver and control RTSP media
* data to RtspDecoder.
*
* RTSP Flow Start vs HTTP Flow Start:
* For HTTP (and files stored on disk), once the channel is created and response
* data is available, HTMLMediaElement::MediaLoadListener::OnStartRequest is
* called. (Note, this is an asynchronous call following channel->AsyncOpen).
* The decoder and MediaResource are set up to talk to each other:
* InitializeDecoderForChannel and FinishDecoderSetup.
* RtspMediaResource is different from this, in that FinishDecoderSetup is
* postponed until after the initial connection with the server is made.
* RtspController, owned by RtspMediaResource, provides the interface to setup
* the connection, and calls RtspMediaResource::Listener::OnConnected
* (from nsIStreamingProtocolListener). FinishDecoderSetup is then called to
* connect RtspMediaResource with RtspDecoder and allow HTMLMediaElement to
* request playback etc.
*
* Playback:
* When the user presses play/pause, HTMLMediaElement::Play/::Pause is called,
* subsequently making calls to the decoder state machine. Upon these state
* changes, the decoder is told to start reading and decoding data. This causes
* the nsIStreamingMediaController object to send play/pause commands to the
* server.
* Data is then delivered to the host and eventually written to the
* RtspTrackBuffer objects. Note that RtspMediaResource does not know about the
* play or pause state. It only knows about the data written into its buffers.
*
* Data Structures and Flow:
* Unlike HTTP, RTSP provides separate streams for audio and video.
* As such, it creates two RtspTrackBuffer objects for the audio and video data.
* Data is read using the function ReadFrameFromTrack. These buffer objects are
* ring buffers, implying that data from the network may be discarded if the
* decoder cannot read at a high enough rate.
*
* Data is delivered via RtspMediaResource::Listener::OnMediaDataAvailable.
* This Listener implements nsIStreamingProtocolListener, and writes the data to
* the appropriate RtspTrackBuffer. The decoder then reads the data by calling
* RtspMediaResource::ReadFrameFromTrack. Note that the decoder and decode
* thread will be blocked until data is available in one of the two buffers.
*
* Seeking:
* Since the frame data received after seek is not continuous with existing
* frames in RtspTrackBuffer, the buffer must be cleared. If we don't clear the
* old frame data in RtspTrackBuffer, the decoder's behavior will be
* unpredictable. So we add |mFrameType| in RtspTrackBuffer to do this:
* When we are seeking, the mFrameType flag is set, and RtspTrackBuffer will
* drop the incoming data until the RTSP server completes the seek operation.
* Note: seeking for RTSP is carried out based on sending the seek time to the
* server, unlike HTTP in which the seek time is converted to a byte offset.
* Thus, RtspMediaResource has a SeekTime function which should be called
* instead of Seek.
* */
class RtspMediaResource : public BaseMediaResource
{
public:
RtspMediaResource(MediaDecoder* aDecoder, nsIChannel* aChannel, nsIURI* aURI,
const nsACString& aContentType);
virtual ~RtspMediaResource();
// The following methods can be called on any thread.
// Get the RtspMediaResource pointer if this MediaResource is a
// RtspMediaResource. For calling Rtsp specific functions.
virtual RtspMediaResource* GetRtspPointer() MOZ_OVERRIDE MOZ_FINAL {
return this;
}
// Returns the nsIStreamingProtocolController in the RtspMediaResource.
// RtspMediaExtractor: request it to get mime type for creating decoder.
// RtspOmxDecoder: request it to send play/pause commands to RTSP server.
// The lifetime of mMediaStreamController is controlled by RtspMediaResource
// because the RtspMediaExtractor and RtspOmxDecoder won't hold the reference.
nsIStreamingProtocolController* GetMediaStreamController() {
return mMediaStreamController;
}
virtual bool IsRealTime() MOZ_OVERRIDE {
return mRealTime;
}
// The following methods can be called on any thread except main thread.
// Read data from track.
// Parameters:
// aToBuffer, aToBufferSize: buffer pointer and buffer size.
// aReadCount: output actual read bytes.
// aFrameTime: output frame time stamp.
// aFrameSize: actual data size in track.
nsresult ReadFrameFromTrack(uint8_t* aBuffer, uint32_t aBufferSize,
uint32_t aTrackIdx, uint32_t& aBytes,
uint64_t& aTime, uint32_t& aFrameSize);
// Seek to the given time offset
nsresult SeekTime(int64_t aOffset);
// dummy
virtual nsresult ReadAt(int64_t aOffset, char* aBuffer,
uint32_t aCount, uint32_t* aBytes) MOZ_OVERRIDE{
return NS_ERROR_FAILURE;
}
// dummy
virtual void SetReadMode(MediaCacheStream::ReadMode aMode) MOZ_OVERRIDE {}
// dummy
virtual void SetPlaybackRate(uint32_t aBytesPerSecond) MOZ_OVERRIDE {}
// dummy
virtual nsresult Read(char* aBuffer, uint32_t aCount, uint32_t* aBytes)
MOZ_OVERRIDE {
return NS_OK;
}
// dummy
virtual nsresult Seek(int32_t aWhence, int64_t aOffset) MOZ_OVERRIDE {
return NS_OK;
}
// dummy
virtual void StartSeekingForMetadata() MOZ_OVERRIDE {}
// dummy
virtual void EndSeekingForMetadata() MOZ_OVERRIDE {}
// dummy
virtual int64_t Tell() MOZ_OVERRIDE { return 0; }
// Any thread
virtual void Pin() MOZ_OVERRIDE {}
virtual void Unpin() MOZ_OVERRIDE {}
// dummy
virtual bool IsSuspendedByCache() MOZ_OVERRIDE { return false; }
virtual bool IsSuspended() MOZ_OVERRIDE { return false; }
virtual bool IsTransportSeekable() MOZ_OVERRIDE { return true; }
// dummy
virtual double GetDownloadRate(bool* aIsReliable) MOZ_OVERRIDE { return 0; }
virtual int64_t GetLength() MOZ_OVERRIDE {
if (mRealTime) {
return -1;
}
return 0;
}
  // No byte cache exists, so the "next cached data" is always offset 0.
  virtual int64_t GetNextCachedData(int64_t aOffset) MOZ_OVERRIDE { return 0; }
  // See GetNextCachedData; always 0.
  virtual int64_t GetCachedDataEnd(int64_t aOffset) MOZ_OVERRIDE { return 0; }
  // Nothing is ever cached to end-of-resource.
  virtual bool IsDataCachedToEndOfResource(int64_t aOffset) MOZ_OVERRIDE {
    return false;
  }
  // Cached byte ranges cannot be reported; always fails.
  // NOTE(review): siblings spell out `virtual` explicitly; this one relies on
  // implicit virtual-ness from the override — consider adding it for
  // consistency.
  nsresult GetCachedRanges(nsTArray<MediaByteRange>& aRanges) MOZ_OVERRIDE {
    return NS_ERROR_FAILURE;
  }
  // The following methods can be called on main thread only.
  // Starts the connection to the RTSP server; |aStreamListener| is the
  // channel listener out-param required by the MediaResource interface.
  virtual nsresult Open(nsIStreamListener** aStreamListener) MOZ_OVERRIDE;
  virtual nsresult Close() MOZ_OVERRIDE;
  virtual void Suspend(bool aCloseImmediately) MOZ_OVERRIDE;
  virtual void Resume() MOZ_OVERRIDE;
  virtual already_AddRefed<nsIPrincipal> GetCurrentPrincipal() MOZ_OVERRIDE;
  // An RTSP session cannot be cloned, so CloneData always returns nullptr.
  virtual bool CanClone() MOZ_OVERRIDE {
    return false;
  }
  virtual already_AddRefed<MediaResource> CloneData(MediaDecoder* aDecoder)
    MOZ_OVERRIDE {
    return nullptr;
  }
  // There is no cache to read from; always fails.
  virtual nsresult ReadFromCache(char* aBuffer, int64_t aOffset,
                                 uint32_t aCount) MOZ_OVERRIDE {
    return NS_ERROR_FAILURE;
  }
  // Listener implements nsIStreamingProtocolListener as
  // mMediaStreamController's callback function.
  // It holds a strong RtspMediaResource reference so it can notify the
  // resource of connection status and data arrival. Revoke() drops that
  // reference when the RtspMediaResource is destroyed, breaking the cycle
  // (controller -> Listener -> resource -> controller).
  class Listener MOZ_FINAL : public nsIInterfaceRequestor,
                             public nsIStreamingProtocolListener
  {
  public:
    // NOTE(review): single-argument ctor; consider `explicit` to avoid
    // accidental implicit conversions.
    Listener(RtspMediaResource* aResource) : mResource(aResource) {}
    ~Listener() {}
    NS_DECL_ISUPPORTS
    NS_DECL_NSIINTERFACEREQUESTOR
    NS_DECL_NSISTREAMINGPROTOCOLLISTENER
    // Called by ~RtspMediaResource to release the back-reference.
    void Revoke() { mResource = nullptr; }
  private:
    nsRefPtr<RtspMediaResource> mResource;
  };
friend class Listener;
protected:
  // Main thread access only.
  // These nsIStreamingProtocolListener methods are called on the main thread
  // by Listener (the controller's callback object declared above).
  NS_DECL_NSISTREAMINGPROTOCOLLISTENER
  nsRefPtr<Listener> mListener;
private:
  // These two members are created at |RtspMediaResource::OnConnected|.
  // mMediaStreamController issues the RTSP commands (play/pause/seek);
  // mTrackBuffer holds one ring buffer of un-decoded frames per a/v track.
  nsCOMPtr<nsIStreamingProtocolController> mMediaStreamController;
  nsTArray<nsAutoPtr<RtspTrackBuffer>> mTrackBuffer;
  // A flag that indicates the |RtspMediaResource::OnConnected| has already been
  // called.
  bool mIsConnected;
  // True for a live stream (no fixed duration, not seekable).
  bool mRealTime;
};
} // namespace mozilla
#endif

View File

@ -76,6 +76,26 @@ void MediaOmxReader::ReleaseDecoder()
}
}
// Create and initialize the OMX decoder on first use (idempotent).
// Sets up the DataSource/MediaExtractor pipeline that supplies the codec
// information (video/audio codec, container metadata) needed to create the
// OMXCodec. Returns NS_OK when the decoder is (already) initialized,
// NS_ERROR_FAILURE on any setup error.
nsresult MediaOmxReader::InitOmxDecoder()
{
  if (!mOmxDecoder.get()) {
    //register sniffers, if they are not registered in this process.
    DataSource::RegisterDefaultSniffers();
    mDecoder->GetResource()->SetReadMode(MediaCacheStream::MODE_METADATA);
    sp<DataSource> dataSource = new MediaStreamSource(mDecoder->GetResource(), mDecoder);
    // Bail out if the DataSource failed to initialize instead of pushing a
    // broken source into MediaExtractor::Create.
    if (dataSource->initCheck()) {
      NS_WARNING("Initializing DataSource for OMX decoder failed");
      return NS_ERROR_FAILURE;
    }
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    if (extractor == nullptr) {
      NS_WARNING("Could not create MediaExtractor");
      return NS_ERROR_FAILURE;
    }
    mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder);
    if (!mOmxDecoder->Init(extractor)) {
      return NS_ERROR_FAILURE;
    }
  }
  return NS_OK;
}
nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags)
{
@ -83,11 +103,10 @@ nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo,
*aTags = nullptr;
if (!mOmxDecoder.get()) {
mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder);
if (!mOmxDecoder->Init()) {
return NS_ERROR_FAILURE;
}
// Initialize the internal OMX Decoder.
nsresult rv = InitOmxDecoder();
if (NS_FAILED(rv)) {
return rv;
}
if (!mOmxDecoder->TryLoad()) {

View File

@ -26,7 +26,6 @@ class AbstractMediaDecoder;
class MediaOmxReader : public MediaDecoderReader
{
nsCString mType;
android::sp<android::OmxDecoder> mOmxDecoder;
bool mHasVideo;
bool mHasAudio;
nsIntRect mPicture;
@ -34,6 +33,16 @@ class MediaOmxReader : public MediaDecoderReader
int64_t mVideoSeekTimeUs;
int64_t mAudioSeekTimeUs;
int32_t mSkipCount;
protected:
android::sp<android::OmxDecoder> mOmxDecoder;
// Called by ReadMetadata() during MediaDecoderStateMachine::DecodeMetadata()
// on decode thread. It create and initialize the OMX decoder including
// setting up custom extractor. The extractor provide the essential
// information used for creating OMX decoder such as video/audio codec.
virtual nsresult InitOmxDecoder();
public:
MediaOmxReader(AbstractMediaDecoder* aDecoder);
~MediaOmxReader();

View File

@ -8,8 +8,9 @@ class MediaOmxStateMachine : public MediaDecoderStateMachine
{
public:
MediaOmxStateMachine(MediaDecoder *aDecoder,
MediaDecoderReader *aReader)
: MediaDecoderStateMachine(aDecoder, aReader) { }
MediaDecoderReader *aReader,
bool aRealTime = false)
: MediaDecoderStateMachine(aDecoder, aReader, aRealTime) { }
protected:
// Due to a bug in the OMX.qcom.video.decoder.mpeg4 decoder, we can't own too

View File

@ -317,36 +317,13 @@ static sp<IOMX> GetOMX()
return sOMX;
}
bool OmxDecoder::Init() {
bool OmxDecoder::Init(sp<MediaExtractor>& extractor) {
#ifdef PR_LOGGING
if (!gOmxDecoderLog) {
gOmxDecoderLog = PR_NewLogModule("OmxDecoder");
}
#endif
//register sniffers, if they are not registered in this process.
DataSource::RegisterDefaultSniffers();
sp<DataSource> dataSource = new MediaStreamSource(mResource, mDecoder);
if (dataSource->initCheck()) {
NS_WARNING("Initializing DataSource for OMX decoder failed");
return false;
}
mResource->SetReadMode(MediaCacheStream::MODE_METADATA);
sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
if (extractor == nullptr) {
NS_WARNING("Could not create MediaExtractor");
return false;
}
const char* extractorMime;
sp<MetaData> meta = extractor->getMetaData();
if (meta->findCString(kKeyMIMEType, &extractorMime) && !strcasecmp(extractorMime, AUDIO_MP3)) {
mIsMp3 = true;
}
ssize_t audioTrackIndex = -1;
ssize_t videoTrackIndex = -1;

View File

@ -5,6 +5,7 @@
#include <stagefright/DataSource.h>
#include <stagefright/MediaSource.h>
#include <utils/RefBase.h>
#include <stagefright/MediaExtractor.h>
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
@ -172,7 +173,16 @@ public:
// MediaResourceManagerClient::EventListener
virtual void statusChanged();
bool Init();
// The MediaExtractor provides essential information for creating OMXCodec
// instance. Such as video/audio codec, we can retrieve them through the
// MediaExtractor::getTrackMetaData().
// In general cases, the extractor is created by a sp<DataSource> which
// connect to a MediaResource like ChannelMediaResource.
// Data is read from the MediaResource to create a suitable extractor which
// extracts data from a container.
// Note: RTSP requires a custom extractor because it doesn't have a container.
bool Init(sp<MediaExtractor>& extractor);
bool TryLoad();
bool IsDormantNeeded();
bool IsWaitingMediaResources();

View File

@ -0,0 +1,65 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "RtspMediaResource.h"
#include "RtspOmxDecoder.h"
#include "RtspOmxReader.h"
#include "MediaOmxStateMachine.h"
namespace mozilla {
// Create a fresh, resource-less decoder of the same type; the caller attaches
// the resource afterwards.
MediaDecoder* RtspOmxDecoder::Clone()
{
  return new RtspOmxDecoder();
}
// Build the state machine with the RTSP-specific reader. The resource's
// IsRealTime() flag is forwarded so the state machine knows whether this is a
// live (unbounded) stream.
MediaDecoderStateMachine* RtspOmxDecoder::CreateStateMachine()
{
  return new MediaOmxStateMachine(this, new RtspOmxReader(this),
                                  mResource->IsRealTime());
}
// Forward play/pause state changes to the RTSP server through the
// nsIStreamingProtocolController, after letting the base class update the
// state machine. Call on the main thread only with the decoder monitor held.
void RtspOmxDecoder::ApplyStateToStateMachine(PlayState aState)
{
  MOZ_ASSERT(NS_IsMainThread());
  GetReentrantMonitor().AssertCurrentThreadIn();

  MediaDecoder::ApplyStateToStateMachine(aState);

  // Seek is deliberately NOT handled here: decoded and un-decoded buffers
  // must be cleared before a seek command is issued, so seeking flows through
  // mDecoderStateMachine::Seek -> RtspOmxReader::Seek (clears decoded data) ->
  // RtspMediaResource::SeekTime (clears un-decoded data) -> controller->Seek().
  RtspMediaResource* rtspResource = mResource->GetRtspPointer();
  MOZ_ASSERT(rtspResource);
  nsIStreamingProtocolController* controller =
    rtspResource->GetMediaStreamController();
  if (!mDecoderStateMachine || !controller) {
    return;
  }
  switch (aState) {
    case PLAY_STATE_PLAYING:
      controller->Play();
      break;
    case PLAY_STATE_PAUSED:
      controller->Pause();
      break;
    default:
      // No RTSP command needed for other state transitions.
      break;
  }
}
} // namespace mozilla

View File

@ -0,0 +1,43 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(RtspOmxDecoder_h_)
#define RtspOmxDecoder_h_
#include "base/basictypes.h"
#include "MediaDecoder.h"
namespace mozilla {
/* RtspOmxDecoder is a subclass of MediaDecoder, not of MediaOmxDecoder,
 * because MediaOmxDecoder doesn't extend any functionality of MediaDecoder
 * that RTSP needs.
 * It creates the RtspOmxReader for the MediaDecoderStateMachine and overrides
 * ApplyStateToStateMachine to send RTSP play/pause commands to the server.
 */
class RtspOmxDecoder : public MediaDecoder
{
public:
  RtspOmxDecoder()
    : MediaDecoder() {
    MOZ_COUNT_CTOR(RtspOmxDecoder);
  }
  ~RtspOmxDecoder() {
    MOZ_COUNT_DTOR(RtspOmxDecoder);
  }
  virtual MediaDecoder* Clone() MOZ_OVERRIDE;
  virtual MediaDecoderStateMachine* CreateStateMachine() MOZ_OVERRIDE;
  // Called by |ChangeState|, override it to send the Rtsp play/pause commands
  // through |nsIStreamingProtocolController|.
  // Call on the main thread only and the lock must be obtained.
  virtual void ApplyStateToStateMachine(PlayState aState) MOZ_OVERRIDE;
};
} // namespace mozilla
#endif

View File

@ -0,0 +1,283 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "RtspOmxReader.h"
#include "AbstractMediaDecoder.h"
#include "MediaDecoderStateMachine.h"
#include "MPAPI.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/Mutex.h"
#include "mozilla/TimeStamp.h"
#include "OmxDecoder.h"
#include "RtspMediaResource.h"
#include "RtspOmxDecoder.h"
#include "VideoUtils.h"
#include <stagefright/MediaExtractor.h>
#include <stagefright/MediaBufferGroup.h>
#include <stagefright/MetaData.h>
#define FRAME_DEFAULT_SIZE 1024
using namespace android;
namespace mozilla {
/* class RtspMediaSource : implements android::MediaSource for OMX.
 * The decoder thread triggers the MediaDecodeStateMachine to read an a/v
 * frame. Then RtspOmxReader asks the OMX decoder to decode it. Finally the
 * code path runs into read() here, which fetches un-decoded frame data from
 * mRtspResource and wraps it in a MediaBuffer handed to the OMX decoder.
 * */
class RtspMediaSource : public android::MediaSource {
public:
  // NOTE(review): |aTrackIdx| arrives as ssize_t but is stored in a uint32_t
  // member — confirm callers never pass a negative index.
  RtspMediaSource(RtspMediaResource *aRtspMediaResource,
                  ssize_t aTrackIdx,
                  uint32_t aFrameMaxSize,
                  const sp<MetaData>& aMeta)
  : mRtspResource(aRtspMediaResource)
  , mFormat(aMeta)
  , mTrackIdx(aTrackIdx)
  , mMonitor("RtspMediaSource.mMonitor")
  , mIsStarted(false)
  , mGroup(nullptr)
  , mBuffer(nullptr)
  , mFrameMaxSize(aFrameMaxSize) {
    MOZ_COUNT_CTOR(RtspMediaSource);
  };
  virtual ~RtspMediaSource() {
    MOZ_COUNT_DTOR(RtspMediaSource);
  }
  virtual status_t start(MetaData *params = nullptr) MOZ_FINAL MOZ_OVERRIDE;
  virtual status_t stop() MOZ_FINAL MOZ_OVERRIDE;
  virtual sp<MetaData> getFormat() MOZ_FINAL MOZ_OVERRIDE {
    ReentrantMonitorAutoEnter mon(mMonitor);
    return mFormat;
  };
  virtual status_t read(MediaBuffer **buffer,
                        const ReadOptions *options = nullptr) MOZ_FINAL MOZ_OVERRIDE ;
private:
  // Strong ref: keeps the resource alive while OMX still holds this source.
  nsRefPtr<RtspMediaResource> mRtspResource;
  // Track format metadata returned by getFormat(); guarded by mMonitor.
  sp<MetaData> mFormat;
  // Index of the a/v track this source reads from.
  uint32_t mTrackIdx;
  // Guards all mutable state below across decoder/main threads.
  ReentrantMonitor mMonitor;
  bool mIsStarted;
  // mGroup owns the mBuffer. mFrameMaxSize is the mBuffer size.
  // mBuffer is the input buffer for the OMX decoder; grown on demand by
  // read() when a frame exceeds mFrameMaxSize.
  nsAutoPtr<MediaBufferGroup> mGroup;
  MediaBuffer* mBuffer;
  uint32_t mFrameMaxSize;
};
// Allocate the single input buffer used to feed the OMX decoder. Idempotent:
// a second call while started is a no-op. Always returns OK.
status_t RtspMediaSource::start(MetaData *params)
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  if (mIsStarted) {
    return OK;
  }
  // The group takes ownership of the freshly created MediaBuffer.
  mGroup = new MediaBufferGroup();
  mGroup->add_buffer(new MediaBuffer(mFrameMaxSize));
  mIsStarted = true;
  return OK;
}
// Release the buffer group (and any buffer currently checked out of it).
// Idempotent: a second call while stopped is a no-op. Always returns OK.
status_t RtspMediaSource::stop()
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  if (!mIsStarted) {
    return OK;
  }
  if (mBuffer) {
    // Return the acquired buffer to the group before dropping the group.
    mBuffer->release();
    mBuffer = nullptr;
  }
  mGroup = nullptr;
  mIsStarted = false;
  return OK;
}
// Read one un-decoded frame from the RTSP track into a MediaBuffer for the
// OMX decoder. On success *out owns the filled buffer (kKeyTime set to the
// frame timestamp) and OK is returned; on failure *out stays nullptr and an
// error status is returned.
status_t RtspMediaSource::read(MediaBuffer **out, const ReadOptions *options)
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  NS_ENSURE_TRUE(mIsStarted, MEDIA_ERROR_BASE);
  NS_ENSURE_TRUE(out, MEDIA_ERROR_BASE);
  *out = nullptr;
  // Video/audio track's initial frame size is FRAME_DEFAULT_SIZE.
  // We need to realloc the mBuffer if the mBuffer doesn't have enough space
  // for the next ReadFrameFromTrack call (actualFrameSize > mFrameMaxSize).
  status_t err;
  uint32_t readCount;
  uint32_t actualFrameSize;
  uint64_t time;
  nsresult rv;
  while (1) {
    err = mGroup->acquire_buffer(&mBuffer);
    NS_ENSURE_TRUE(err == OK, err);
    rv = mRtspResource->ReadFrameFromTrack((uint8_t *)mBuffer->data(),
                                           mFrameMaxSize, mTrackIdx, readCount,
                                           time, actualFrameSize);
    // Any read failure bails out here with ERROR_IO (mBuffer stays checked
    // out; stop() will release it).
    NS_ENSURE_SUCCESS(rv, ERROR_IO);
    if (actualFrameSize > mFrameMaxSize) {
      // The frame is larger than the current buffer: release mGroup/mBuffer...
      stop();
      // ...and re-construct them at the larger size, then retry the read.
      mFrameMaxSize = actualFrameSize;
      err = start();
      NS_ENSURE_TRUE(err == OK, err);
    } else {
      // ReadFrameFromTrack succeeded with a fitting buffer; exit the loop.
      break;
    }
  }
  mBuffer->set_range(0, readCount);
  // NOTE(review): rv is guaranteed successful here (NS_ENSURE_SUCCESS above
  // returned on failure), so the ERROR_END_OF_STREAM fall-through below looks
  // unreachable — confirm whether EOS is meant to be signalled differently.
  if (NS_SUCCEEDED(rv)) {
    mBuffer->meta_data()->clear();
    // Fill the metadata: frame presentation timestamp.
    mBuffer->meta_data()->setInt64(kKeyTime, time);
    *out = mBuffer;
    mBuffer = nullptr;
    return OK;
  }
  return ERROR_END_OF_STREAM;
}
// RtspExtractor is a custom extractor for an RTSP stream, whereas the other
// XXXExtractors are made for container media content.
// The extractor is used by |OmxDecoder::Init|; it provides the essential
// information for creating the OMXCodec instance.
// For example, |getTrackMetaData| returns metadata that includes the
// codec type.
class RtspExtractor: public MediaExtractor
{
public:
  virtual size_t countTracks() MOZ_FINAL MOZ_OVERRIDE;
  virtual sp<android::MediaSource> getTrack(size_t index)
    MOZ_FINAL MOZ_OVERRIDE;
  virtual sp<MetaData> getTrackMetaData(
    size_t index, uint32_t flag = 0) MOZ_FINAL MOZ_OVERRIDE;
  // NOTE(review): single-argument ctor; consider `explicit`.
  RtspExtractor(RtspMediaResource *aResource)
    : mRtspResource(aResource) {
    MOZ_COUNT_CTOR(RtspExtractor);
    MOZ_ASSERT(aResource);
    // Cache the controller up front; it supplies track counts and metadata.
    mController = mRtspResource->GetMediaStreamController();
    MOZ_ASSERT(mController);
  }
  virtual ~RtspExtractor() MOZ_OVERRIDE {
    MOZ_COUNT_DTOR(RtspExtractor);
  }
private:
  // mRtspResource is a non-owning pointer to RtspMediaResource. When
  // |getTrack| is called we use it to construct an RtspMediaSource.
  RtspMediaResource* mRtspResource;
  // Through the mController in mRtspResource, we can get the essential
  // information for the extractor.
  // NOTE(review): XPCOM interfaces are conventionally held in nsCOMPtr rather
  // than nsRefPtr — confirm this compiles/behaves as intended.
  nsRefPtr<nsIStreamingProtocolController> mController;
};
// Number of a/v tracks the RTSP controller reports; 0 when no controller is
// available.
size_t RtspExtractor::countTracks()
{
  uint8_t trackCount = 0;
  if (mController) {
    mController->GetTotalTracks(&trackCount);
  }
  return static_cast<size_t>(trackCount);
}
// Create the MediaSource for track |index|: an RtspMediaSource bound to this
// extractor's resource, starting with the default frame buffer size (it grows
// on demand in RtspMediaSource::read). Returns nullptr for an out-of-range
// index.
sp<android::MediaSource> RtspExtractor::getTrack(size_t index)
{
  NS_ENSURE_TRUE(index < countTracks(), nullptr);
  sp<MetaData> meta = getTrackMetaData(index);
  sp<android::MediaSource> source = new RtspMediaSource(mRtspResource,
                                                        index,
                                                        FRAME_DEFAULT_SIZE,
                                                        meta);
  return source;
}
// Build a stagefright MetaData record for track |index| from the controller's
// nsIStreamingProtocolMetaData: mime type, video dimensions, audio sample
// rate / channel count, duration, and ESDS/AVCC codec-specific data when
// present. Returns nullptr when |index| is out of range.
sp<MetaData> RtspExtractor::getTrackMetaData(size_t index, uint32_t flag)
{
  NS_ENSURE_TRUE(index < countTracks(), nullptr);
  sp<MetaData> meta = new MetaData();
  nsCOMPtr<nsIStreamingProtocolMetaData> rtspMetadata;
  mController->GetTrackMetaData(index, getter_AddRefs(rtspMetadata));
  if (rtspMetadata) {
    // Convert rtspMetadata into meta.
    // The getter functions of nsIStreamingProtocolMetaData initialize the
    // out-values to 0 before setting them, so missing fields become 0.
    nsCString mime;
    rtspMetadata->GetMimeType(mime);
    meta->setCString(kKeyMIMEType, mime.get());
    uint32_t temp32;
    rtspMetadata->GetWidth(&temp32);
    meta->setInt32(kKeyWidth, temp32);
    rtspMetadata->GetHeight(&temp32);
    meta->setInt32(kKeyHeight, temp32);
    rtspMetadata->GetSampleRate(&temp32);
    meta->setInt32(kKeySampleRate, temp32);
    rtspMetadata->GetChannelCount(&temp32);
    meta->setInt32(kKeyChannelCount, temp32);
    uint64_t temp64;
    rtspMetadata->GetDuration(&temp64);
    meta->setInt64(kKeyDuration, temp64);
    // Codec-specific configuration blobs (AAC ESDS / H.264 AVCC) are only
    // attached when the server provided them.
    nsCString tempCString;
    rtspMetadata->GetEsdsData(tempCString);
    if (tempCString.Length()) {
      meta->setData(kKeyESDS, 0, tempCString.get(), tempCString.Length());
    }
    rtspMetadata->GetAvccData(tempCString);
    if (tempCString.Length()) {
      meta->setData(kKeyAVCC, 0, tempCString.get(), tempCString.Length());
    }
  }
  return meta;
}
// Create the OMX decoder on first use with the RTSP-specific extractor
// (RtspExtractor) instead of a container-sniffing one: an RTSP stream has no
// container, so track/codec information comes from the streaming controller.
// Returns NS_OK when the decoder is (already) initialized.
nsresult RtspOmxReader::InitOmxDecoder()
{
  if (!mOmxDecoder.get()) {
    NS_ASSERTION(mDecoder, "RtspOmxReader mDecoder is null.");
    NS_ASSERTION(mDecoder->GetResource(),
                 "RtspOmxReader mDecoder->GetResource() is null.");
    sp<MediaExtractor> extractor = new RtspExtractor(mRtspResource);
    mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder);
    if (!mOmxDecoder->Init(extractor)) {
      return NS_ERROR_FAILURE;
    }
  }
  return NS_OK;
}
// Perform an RTSP seek: time-based, in two steps.
// 1. RtspMediaResource::SeekTime sends the seek command to the server over
//    the network and clears the un-decoded buffer data in the resource.
// 2. MediaOmxReader::Seek clears the decoded |mVideoQueue|/|mAudioQueue| and
//    calls |DecodeToTarget| to pass the seek time to the OMX a/v decoders.
nsresult RtspOmxReader::Seek(int64_t aTime, int64_t aStartTime,
                             int64_t aEndTime, int64_t aCurrentTime)
{
  // The seek function of Rtsp is time-based, so call SeekTime on
  // RtspMediaResource rather than any byte-offset based seek.
  if (mRtspResource) {
    mRtspResource->SeekTime(aTime);
  }
  // Notify the OMX decoder that we are performing a seek operation.
  return MediaOmxReader::Seek(aTime, aStartTime, aEndTime, aCurrentTime);
}
} // namespace mozilla

View File

@ -0,0 +1,75 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(RtspOmxReader_h_)
#define RtspOmxReader_h_
#include "MediaResource.h"
#include "MediaDecoderReader.h"
#include "MediaOmxReader.h"
namespace mozilla {
namespace dom {
class TimeRanges;
}
class AbstractMediaDecoder;
class RtspMediaResource;
/* RtspOmxReader is a subclass of MediaOmxReader.
 * The major reason RtspOmxReader inherits from MediaOmxReader is that the
 * video/audio decoding logic is identical and can be reused.
 */
class RtspOmxReader : public MediaOmxReader
{
protected:
  // Provide an RTSP-specific extractor instead of container sniffing.
  nsresult InitOmxDecoder() MOZ_FINAL MOZ_OVERRIDE;

public:
  RtspOmxReader(AbstractMediaDecoder* aDecoder)
    : MediaOmxReader(aDecoder) {
    MOZ_COUNT_CTOR(RtspOmxReader);
    NS_ASSERTION(mDecoder, "RtspOmxReader mDecoder is null.");
    NS_ASSERTION(mDecoder->GetResource(),
                 "RtspOmxReader mDecoder->GetResource() is null.");
    mRtspResource = mDecoder->GetResource()->GetRtspPointer();
    MOZ_ASSERT(mRtspResource);
  }
  virtual ~RtspOmxReader() MOZ_OVERRIDE {
    MOZ_COUNT_DTOR(RtspOmxReader);
  }
  // Implement a time-based seek instead of a byte-based one.
  virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
                        int64_t aCurrentTime) MOZ_FINAL MOZ_OVERRIDE;
  // Override GetBuffered() to do nothing for the reasons below:
  // 1. The Rtsp stream is a/v separated, so the buffered data in the a/v
  //    tracks is not consistent with the time stamp.
  //    For example: audio buffer: 1~2s, video buffer: 1.5~2.5s
  // 2. Since Rtsp is realtime streaming, the buffer we keep in
  //    RtspMediaResource is quite small. The small buffer implies the time
  //    ranges we could return are not useful to the MediaDecodeStateMachine.
  //    Unlike ChannelMediaResource, there is no cache storing the whole
  //    stream from which GetBuffered could derive useful ranges.
  virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered,
                               int64_t aStartTime) MOZ_FINAL MOZ_OVERRIDE {
    return NS_OK;
  }

private:
  // A non-owning pointer to RtspMediaResource for calling RTSP-specific
  // functions. Lifetime is controlled by MediaDecoder: MediaDecoder holds the
  // MediaDecoderStateMachine and the RtspMediaResource, and the
  // MediaDecoderStateMachine holds this RtspOmxReader, so the resource
  // outlives this reader.
  RtspMediaResource* mRtspResource;
};
} // namespace mozilla
#endif