Bug 1191170 - Move DecodedStreamData from the header to its source file. r=roc.

JW Wang 2015-07-19 21:25:08 +08:00
parent 2efd915f21
commit 2b6c0d8cee
2 changed files with 62 additions and 61 deletions

DecodedStream.cpp

@@ -87,6 +87,52 @@ UpdateStreamBlocking(MediaStream* aStream, bool aBlocking)
}
}
/*
* All MediaStream-related data is protected by the decoder's monitor.
 * We have at most one DecodedStreamData per MediaDecoder. Its stream
* is used as the input for each ProcessedMediaStream created by calls to
* captureStream(UntilEnded). Seeking creates a new source stream, as does
 * replaying after the input has ended. In the latter case, the new source is
* not connected to streams created by captureStreamUntilEnded.
*/
class DecodedStreamData {
public:
DecodedStreamData(SourceMediaStream* aStream, bool aPlaying);
~DecodedStreamData();
bool IsFinished() const;
int64_t GetPosition() const;
void SetPlaying(bool aPlaying);
/* The following group of fields are protected by the decoder's monitor
* and can be read or written on any thread.
*/
// Count of audio frames written to the stream
int64_t mAudioFramesWritten;
// mNextVideoTime is the end timestamp for the last packet sent to the stream.
// Therefore video packets starting at or after this time need to be copied
// to the output stream.
int64_t mNextVideoTime; // microseconds
int64_t mNextAudioTime; // microseconds
// The last video image sent to the stream. Useful if we need to replicate
// the image.
nsRefPtr<layers::Image> mLastVideoImage;
gfx::IntSize mLastVideoImageDisplaySize;
// This is set to true when the stream is initialized (audio and
// video tracks added).
bool mStreamInitialized;
bool mHaveSentFinish;
bool mHaveSentFinishAudio;
bool mHaveSentFinishVideo;
// The decoder is responsible for calling Destroy() on this stream.
const nsRefPtr<SourceMediaStream> mStream;
nsRefPtr<DecodedStreamGraphListener> mListener;
bool mPlaying;
// True if we need to send a compensation video frame to keep the
// StreamTime advancing.
bool mEOSVideoCompensation;
};
DecodedStreamData::DecodedStreamData(SourceMediaStream* aStream, bool aPlaying)
: mAudioFramesWritten(0)
, mNextVideoTime(-1)
@@ -191,7 +237,10 @@ DecodedStream::DecodedStream(MediaQueue<MediaData>& aAudioQueue,
, mAudioQueue(aAudioQueue)
, mVideoQueue(aVideoQueue)
{
//
}
DecodedStream::~DecodedStream()
{
}
void
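The mNextVideoTime bookkeeping documented above has a simple invariant: forward any packet whose start time is at or after mNextVideoTime, then advance mNextVideoTime to that packet's end time. A small standalone sketch of that rule follows; the Packet struct and ForwardNewPackets function are illustrative names, not part of the actual DecodedStream code.

#include <cstdint>
#include <vector>

struct Packet {
  int64_t mStartTime; // microseconds
  int64_t mEndTime;   // microseconds
};

// Copy every packet that starts at or after aNextTime to aOutput and
// return the updated next-time value (the end time of the last packet sent).
int64_t ForwardNewPackets(const std::vector<Packet>& aQueue,
                          int64_t aNextTime,
                          std::vector<Packet>& aOutput)
{
  for (const Packet& p : aQueue) {
    if (p.mStartTime >= aNextTime) {
      aOutput.push_back(p);    // packet is new; send it to the output stream
      aNextTime = p.mEndTime;  // later packets must start at or after this
    }
  }
  return aNextTime;
}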

DecodedStream.h

@ -7,29 +7,27 @@
#ifndef DecodedStream_h_
#define DecodedStream_h_
#include "mozilla/nsRefPtr.h"
#include "nsTArray.h"
#include "MediaInfo.h"
#include "mozilla/CheckedInt.h"
#include "mozilla/Maybe.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/UniquePtr.h"
#include "mozilla/gfx/Point.h"
#include "mozilla/CheckedInt.h"
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/Maybe.h"
namespace mozilla {
class MediaData;
class AudioSegment;
class MediaStream;
class MediaInputPort;
class SourceMediaStream;
class ProcessedMediaStream;
class DecodedStream;
class DecodedStreamGraphListener;
class OutputStreamListener;
class ReentrantMonitor;
class DecodedStreamData;
class MediaData;
class MediaInputPort;
class MediaStream;
class MediaStreamGraph;
class OutputStreamListener;
class ProcessedMediaStream;
class ReentrantMonitor;
template <class T> class MediaQueue;
@@ -37,52 +35,6 @@ namespace layers {
class Image;
} // namespace layers
/*
* All MediaStream-related data is protected by the decoder's monitor.
 * We have at most one DecodedStreamData per MediaDecoder. Its stream
* is used as the input for each ProcessedMediaStream created by calls to
* captureStream(UntilEnded). Seeking creates a new source stream, as does
 * replaying after the input has ended. In the latter case, the new source is
* not connected to streams created by captureStreamUntilEnded.
*/
class DecodedStreamData {
public:
DecodedStreamData(SourceMediaStream* aStream, bool aPlaying);
~DecodedStreamData();
bool IsFinished() const;
int64_t GetPosition() const;
void SetPlaying(bool aPlaying);
/* The following group of fields are protected by the decoder's monitor
* and can be read or written on any thread.
*/
// Count of audio frames written to the stream
int64_t mAudioFramesWritten;
// mNextVideoTime is the end timestamp for the last packet sent to the stream.
// Therefore video packets starting at or after this time need to be copied
// to the output stream.
int64_t mNextVideoTime; // microseconds
int64_t mNextAudioTime; // microseconds
// The last video image sent to the stream. Useful if we need to replicate
// the image.
nsRefPtr<layers::Image> mLastVideoImage;
gfx::IntSize mLastVideoImageDisplaySize;
// This is set to true when the stream is initialized (audio and
// video tracks added).
bool mStreamInitialized;
bool mHaveSentFinish;
bool mHaveSentFinishAudio;
bool mHaveSentFinishVideo;
// The decoder is responsible for calling Destroy() on this stream.
const nsRefPtr<SourceMediaStream> mStream;
nsRefPtr<DecodedStreamGraphListener> mListener;
bool mPlaying;
// True if we need to send a compensation video frame to keep the
// StreamTime advancing.
bool mEOSVideoCompensation;
};
class OutputStreamData {
public:
~OutputStreamData();
@@ -119,7 +71,7 @@ public:
bool SendData(double aVolume, bool aIsSameOrigin);
protected:
virtual ~DecodedStream() {}
virtual ~DecodedStream();
private:
ReentrantMonitor& GetReentrantMonitor() const;
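The final hunk, where the inline destructor virtual ~DecodedStream() {} becomes an out-of-line declaration, follows from the move itself: with DecodedStreamData now only forward-declared in DecodedStream.h, any member that owns a DecodedStreamData through a smart pointer (the UniquePtr.h include suggests one, though the member is outside the visible hunks) can have its destructor instantiated only in the .cpp, where the type is complete. Below is a minimal standalone sketch of that pattern; the Player and StreamState names are illustrative and not from this commit.

#include <cstdint>
#include <memory>

// Header side: clients see only an incomplete type.
class StreamState;                      // forward declaration, like DecodedStreamData in the header

class Player {
public:
  Player();
  virtual ~Player();                    // declared only; an inline body here would need
                                        // StreamState to be complete and would not compile
private:
  std::unique_ptr<StreamState> mState;  // owning pointer to the hidden class
};

// Source side: the full definition lives next to the implementation.
class StreamState {
public:
  int64_t mFramesWritten = 0;
};

Player::Player() : mState(new StreamState()) {}
Player::~Player() = default;            // StreamState is complete here, so the deleter compiles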