Bug 951496 - Codec getStats. r=smaug, r=jesup

This commit is contained in:
Jan-Ivar Bruaroey 2014-06-07 17:27:26 -04:00
parent 2151e81cd4
commit c8f9921b5d
9 changed files with 220 additions and 13 deletions

View File

@ -5,6 +5,7 @@
*
* The origin of this IDL file is
* http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcstatsreport-object
* http://www.w3.org/2011/04/webrtc/wiki/Stats
*/
enum RTCStatsType {
@ -31,6 +32,12 @@ dictionary RTCRTPStreamStats : RTCStats {
DOMString mediaTrackId;
DOMString transportId;
DOMString codecId;
// Video encoder/decoder measurements (absent for rtcp)
double bitrateMean;
double bitrateStdDev;
double framerateMean;
double framerateStdDev;
};
dictionary RTCInboundRTPStreamStats : RTCRTPStreamStats {
@ -41,24 +48,38 @@ dictionary RTCInboundRTPStreamStats : RTCRTPStreamStats {
long mozAvSyncDelay;
long mozJitterBufferDelay;
long mozRtt;
// Video decoder measurement (absent in rtcp case)
unsigned long discardedPackets;
};
dictionary RTCOutboundRTPStreamStats : RTCRTPStreamStats {
// RTP-level send counters for this SSRC (presumably cumulative since the
// start of the session -- confirm against the w3c stats spec).
unsigned long packetsSent;
unsigned long long bytesSent;
double targetBitrate; // config encoder bitrate target of this SSRC in bits/s
// Video encoder measurement (absent in rtcp case)
unsigned long droppedFrames;
};
// NOTE(review): the scraped diff flattened removed+added lines together,
// leaving duplicate members (framesPerSecond, framesReceived) and two
// conflicting audioLevel declarations (unsigned long vs double) -- invalid
// WebIDL, since dictionary member names must be unique. Reconstructed the
// post-change dictionary: audioLevel moved to the audio section as a double.
dictionary RTCMediaStreamTrackStats : RTCStats {
DOMString trackIdentifier; // track.id property
boolean remoteSource;
sequence<DOMString> ssrcIds;
// Stuff that makes sense for video
unsigned long frameWidth;
unsigned long frameHeight;
double framesPerSecond; // The nominal FPS value
unsigned long framesSent;
unsigned long framesReceived; // Only for remoteSource=true
unsigned long framesDecoded;
unsigned long framesDropped; // See VideoPlaybackQuality.droppedVideoFrames
unsigned long framesCorrupted; // as above.
// Stuff that makes sense for audio
double audioLevel; // linear, 1.0 = 0 dBov (from RFC 6464).
// AEC stuff on audio tracks sourced from a microphone where AEC is applied
double echoReturnLoss; // in decibels from G.168 (2012) section 3.14
double echoReturnLossEnhancement; // as above, section 3.15
};
dictionary RTCMediaStreamStats : RTCStats {

View File

@ -178,6 +178,22 @@ public:
webrtc::VoiceEngine* GetVoiceEngine() { return mVoiceEngine; }
bool GetLocalSSRC(unsigned int* ssrc);
bool GetRemoteSSRC(unsigned int* ssrc);
// Stub: video encoder stats never apply to this conduit (it exposes
// GetVoiceEngine, i.e. it is the audio side), so always report "unavailable".
bool GetVideoEncoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* droppedFrames)
{
return false; // no out-params are written; caller must check the result
}
// Stub: video decoder stats never apply to this (audio) conduit, so always
// report "unavailable".
bool GetVideoDecoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* discardedPackets)
{
return false; // no out-params are written; caller must check the result
}
bool GetAVStats(int32_t* jitterBufferDelayMs,
int32_t* playoutBufferDelayMs,
int32_t* avSyncOffsetMs);

View File

@ -12,22 +12,31 @@ using namespace webrtc;
// use the same tag as VideoConduit
static const char* logTag ="WebrtcVideoSessionConduit";
// NOTE(review): the scraped diff left both the old two-argument signature and
// the new three-argument one, plus a duplicated initializer and both the old
// unconditional and new conditional observer registration -- this cannot
// compile. Reconstructed the post-change constructor.
//
// Collects codec statistics for one ViE channel. A single instance observes
// either the encoder or the decoder side, selected by |encoder|; the matching
// observer is registered here and deregistered in the destructor.
VideoCodecStatistics::VideoCodecStatistics(int channel,
                                           ViECodec* codec,
                                           bool encoder) :
  mChannel(channel),
  mSentRawFrames(0),
  mPtrViECodec(codec),
  mEncoderDroppedFrames(0),
  mDecoderDiscardedPackets(0),
  mEncoderMode(encoder)
{
  MOZ_ASSERT(mPtrViECodec);
  if (mEncoderMode) {
    mPtrViECodec->RegisterEncoderObserver(mChannel, *this);
  } else {
    mPtrViECodec->RegisterDecoderObserver(mChannel, *this);
  }
}
// NOTE(review): the scraped diff flattened the old unconditional deregister
// calls together with the new conditional ones. Reconstructed the post-change
// destructor: deregister only the observer that the constructor registered
// for this mode.
VideoCodecStatistics::~VideoCodecStatistics()
{
  if (mEncoderMode) {
    mPtrViECodec->DeregisterEncoderObserver(mChannel);
  } else {
    mPtrViECodec->DeregisterDecoderObserver(mChannel);
  }
}
void VideoCodecStatistics::OutgoingRate(const int video_channel,

View File

@ -19,7 +19,7 @@ class VideoCodecStatistics : public webrtc::ViEEncoderObserver
, public webrtc::ViEDecoderObserver
{
public:
// NOTE(review): the diff scrape kept both the old two-argument and the new
// three-argument constructor declarations; only the new one exists after the
// change. |encoder| selects whether this instance observes the encoder (true)
// or decoder (false) side of the channel.
VideoCodecStatistics(int channel, webrtc::ViECodec* vieCodec, bool encoder);
~VideoCodecStatistics();
void SentFrame();
@ -43,6 +43,35 @@ public:
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) MOZ_OVERRIDE {}
// Snapshot the running encoder measurements into the caller's out-params.
// Framerates are in fps and bitrates in bits/s as accumulated by the
// observer callbacks; always succeeds.
bool GetEncoderStats(double* framerateMean,
                     double* framerateStdDev,
                     double* bitrateMean,
                     double* bitrateStdDev,
                     uint32_t* droppedFrames)
{
  *droppedFrames   = mEncoderDroppedFrames;
  *framerateMean   = mEncoderFps.Mean();
  *framerateStdDev = mEncoderFps.StandardDeviation();
  *bitrateMean     = mEncoderBitRate.Mean();
  *bitrateStdDev   = mEncoderBitRate.StandardDeviation();
  return true;
}
// Snapshot the running decoder measurements into the caller's out-params.
// Mirrors GetEncoderStats but reads the decoder-side accumulators; always
// succeeds.
bool GetDecoderStats(double* framerateMean,
                     double* framerateStdDev,
                     double* bitrateMean,
                     double* bitrateStdDev,
                     uint32_t* discardedPackets)
{
  *discardedPackets = mDecoderDiscardedPackets;
  *framerateMean    = mDecoderFps.Mean();
  *framerateStdDev  = mDecoderFps.StandardDeviation();
  *bitrateMean      = mDecoderBitRate.Mean();
  *bitrateStdDev    = mDecoderBitRate.StandardDeviation();
  return true;
}
void Dump();
private:
void Dump(RunningStat& s, const char *name);
@ -57,6 +86,7 @@ private:
RunningStat mDecoderBitRate;
RunningStat mDecoderFps;
uint32_t mDecoderDiscardedPackets;
const bool mEncoderMode;
};
}

View File

@ -164,6 +164,16 @@ public:
/**
* Functions returning stats needed by w3c stats model.
*/
// Query video encoder/decoder measurements for the stats API. Implementations
// return false when the stats are unavailable (e.g. conduit not configured to
// send/receive, or an audio conduit), in which case the out-params are left
// unwritten -- callers must check the result before reading them.
virtual bool GetVideoEncoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* droppedFrames) = 0;
virtual bool GetVideoDecoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* discardedPackets) = 0;
virtual bool GetAVStats(int32_t* jitterBufferDelayMs,
int32_t* playoutBufferDelayMs,
int32_t* avSyncOffsetMs) = 0;

View File

@ -137,14 +137,48 @@ WebrtcVideoConduit::~WebrtcVideoConduit()
}
}
bool WebrtcVideoConduit::GetLocalSSRC(unsigned int* ssrc) {
bool WebrtcVideoConduit::GetLocalSSRC(unsigned int* ssrc)
{
return !mPtrRTP->GetLocalSSRC(mChannel, *ssrc);
}
bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc) {
bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc)
{
return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc);
}
// Fetch encoder-side codec measurements from mVideoCodecStat. Only valid
// while this conduit is configured to transmit; otherwise report
// "unavailable" and leave the out-params untouched.
bool WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean,
                                              double* framerateStdDev,
                                              double* bitrateMean,
                                              double* bitrateStdDev,
                                              uint32_t* droppedFrames)
{
  if (mEngineTransmitting) {
    // ConfigureSendMediaCodec creates mVideoCodecStat before transmit starts.
    MOZ_ASSERT(mVideoCodecStat);
    mVideoCodecStat->GetEncoderStats(framerateMean, framerateStdDev,
                                     bitrateMean, bitrateStdDev,
                                     droppedFrames);
    return true;
  }
  return false;
}
// Fetch decoder-side codec measurements from mVideoCodecStat. Only valid
// while this conduit is configured to receive; otherwise report
// "unavailable" and leave the out-params untouched.
bool WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
                                              double* framerateStdDev,
                                              double* bitrateMean,
                                              double* bitrateStdDev,
                                              uint32_t* discardedPackets)
{
  if (mEngineReceiving) {
    // ConfigureRecvMediaCodecs creates mVideoCodecStat before receive starts.
    MOZ_ASSERT(mVideoCodecStat);
    mVideoCodecStat->GetDecoderStats(framerateMean, framerateStdDev,
                                     bitrateMean, bitrateStdDev,
                                     discardedPackets);
    return true;
  }
  return false;
}
bool WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
int32_t* playoutBufferDelayMs,
int32_t* avSyncOffsetMs) {
@ -571,7 +605,9 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
return kMediaConduitUnknownError;
}
// NOTE(review): the diff scrape kept the stale two-argument allocation line
// alongside the new guarded three-argument one; only the guarded form exists
// after the change. Create the stats collector in encoder mode exactly once.
if (!mVideoCodecStat) {
  mVideoCodecStat = new VideoCodecStatistics(mChannel, mPtrViECodec, true);
}
mSendingWidth = 0;
mSendingHeight = 0;
@ -741,6 +777,10 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
return kMediaConduitInvalidReceiveCodec;
}
if (!mVideoCodecStat) {
mVideoCodecStat = new VideoCodecStatistics(mChannel, mPtrViECodec, false);
}
// XXX Currently, we gather up all of the feedback types that the remote
// party indicated it supports for all video codecs and configure the entire
// conduit based on those capabilities. This is technically out of spec,

View File

@ -251,6 +251,16 @@ public:
webrtc::VideoEngine* GetVideoEngine() { return mVideoEngine; }
bool GetLocalSSRC(unsigned int* ssrc);
bool GetRemoteSSRC(unsigned int* ssrc);
bool GetVideoEncoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* droppedFrames);
bool GetVideoDecoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* discardedPackets);
bool GetAVStats(int32_t* jitterBufferDelayMs,
int32_t* playoutBufferDelayMs,
int32_t* avSyncOffsetMs);

View File

@ -2247,6 +2247,7 @@ PeerConnectionImpl::ExecuteStatsQuery_s(RTCStatsQuery *query) {
NS_LITERAL_STRING("audio_") : NS_LITERAL_STRING("video_");
idstr.AppendInt(mp.trackid());
// Gather pipeline stats.
switch (mp.direction()) {
case MediaPipeline::TRANSMIT: {
nsString localId = NS_LITERAL_STRING("outbound_rtp_") + idstr;
@ -2302,6 +2303,26 @@ PeerConnectionImpl::ExecuteStatsQuery_s(RTCStatsQuery *query) {
s.mIsRemote = false;
s.mPacketsSent.Construct(mp.rtp_packets_sent());
s.mBytesSent.Construct(mp.rtp_bytes_sent());
// Lastly, fill in video encoder stats if this is video
if (!isAudio) {
double framerateMean;
double framerateStdDev;
double bitrateMean;
double bitrateStdDev;
uint32_t droppedFrames;
if (mp.Conduit()->GetVideoEncoderStats(&framerateMean,
&framerateStdDev,
&bitrateMean,
&bitrateStdDev,
&droppedFrames)) {
s.mFramerateMean.Construct(framerateMean);
s.mFramerateStdDev.Construct(framerateStdDev);
s.mBitrateMean.Construct(bitrateMean);
s.mBitrateStdDev.Construct(bitrateStdDev);
s.mDroppedFrames.Construct(droppedFrames);
}
}
query->report->mOutboundRTPStreamStats.Value().AppendElement(s);
}
break;
@ -2367,6 +2388,25 @@ PeerConnectionImpl::ExecuteStatsQuery_s(RTCStatsQuery *query) {
s.mMozAvSyncDelay.Construct(avSyncDelta);
}
}
// Lastly, fill in video decoder stats if this is video
if (!isAudio) {
double framerateMean;
double framerateStdDev;
double bitrateMean;
double bitrateStdDev;
uint32_t discardedPackets;
if (mp.Conduit()->GetVideoDecoderStats(&framerateMean,
&framerateStdDev,
&bitrateMean,
&bitrateStdDev,
&discardedPackets)) {
s.mFramerateMean.Construct(framerateMean);
s.mFramerateStdDev.Construct(framerateStdDev);
s.mBitrateMean.Construct(bitrateMean);
s.mBitrateStdDev.Construct(bitrateStdDev);
s.mDiscardedPackets.Construct(discardedPackets);
}
}
query->report->mInboundRTPStreamStats.Value().AppendElement(s);
break;
}

View File

@ -171,6 +171,36 @@ function dumpRtpStat(stat, label) {
return div;
}
// Build a <div> summarizing the codec measurements on |stat|, or an empty
// <div> when none are present. Inbound stats carry packetsReceived, so that
// field's presence selects the "Decoder:" vs "Encoder:" label.
function dumpCoderStat(stat) {
  var div = document.createElement('div');
  var hasCoderStats = (stat.bitrateMean !== undefined ||
                       stat.framerateMean !== undefined ||
                       stat.droppedFrames !== undefined ||
                       stat.discardedPackets !== undefined);
  if (hasCoderStats) {
    var line = (stat.packetsReceived !== undefined)? " Decoder:" : " Encoder:";
    if (stat.bitrateMean !== undefined) {
      line += " Avg. bitrate: " + (stat.bitrateMean/1000000).toFixed(2) + " Mbps";
      if (stat.bitrateStdDev !== undefined) {
        line += " (" + (stat.bitrateStdDev/1000000).toFixed(2) + " SD)";
      }
    }
    if (stat.framerateMean !== undefined) {
      line += " Avg. framerate: " + (stat.framerateMean).toFixed(2) + " fps";
      if (stat.framerateStdDev !== undefined) {
        line += " (" + stat.framerateStdDev.toFixed(2) + " SD)";
      }
    }
    if (stat.droppedFrames !== undefined) {
      line += " Dropped frames: " + stat.droppedFrames;
    }
    if (stat.discardedPackets !== undefined) {
      line += " Discarded packets: " + stat.discardedPackets;
    }
    div.appendChild(document.createTextNode(line));
  }
  return div;
}
function buildPcDiv(stats, pcDivHeading) {
var newPcDiv = document.createElement('div');
@ -300,6 +330,7 @@ function buildPcDiv(stats, pcDivHeading) {
rtpStat.mozJitterBufferDelay !== undefined) {
newPcDiv.appendChild(dumpAvStat(rtpStat));
}
newPcDiv.appendChild(dumpCoderStat(rtpStat));
newPcDiv.appendChild(dumpRtpStat(rtpStat, "Local: "));
// Might not be receiving RTCP, so we have no idea what the