--- a/dom/media/tests/mochitest/test_peerConnection_stats.html
+++ b/dom/media/tests/mochitest/test_peerConnection_stats.html
@@ -9,34 +9,34 @@
createHTML({
bug: "1337525",
title: "webRtc Stats composition and sanity"
});
var statsExpectedByType = {
"inbound-rtp": {
expected: ["id", "timestamp", "type", "ssrc", "isRemote", "mediaType",
"packetsReceived", "packetsLost", "bytesReceived", "jitter",],
- optional: ["mozRtt", "remoteId",],
+ optional: ["mozRtt", "remoteId", "nackCount",],
-videoOnly: ["discardedPackets", "framerateStdDev", "framerateMean",
- "bitrateMean", "bitrateStdDev",],
+localVideoOnly: ["discardedPackets", "framerateStdDev", "framerateMean",
+ "bitrateMean", "bitrateStdDev", "firCount", "pliCount",],
unimplemented: ["mediaTrackId", "transportId", "codecId", "framesDecoded",
- "packetsDiscarded", "associateStatsId", "firCount", "pliCount",
- "nackCount", "sliCount", "qpSum", "packetsRepaired", "fractionLost",
+ "packetsDiscarded", "associateStatsId",
+ "sliCount", "qpSum", "packetsRepaired", "fractionLost",
"burstPacketsLost", "burstLossCount", "burstDiscardCount",
"gapDiscardRate", "gapLossRate",],
},
"outbound-rtp": {
expected: ["id", "timestamp", "type", "ssrc", "isRemote", "mediaType",
"packetsSent", "bytesSent", "remoteId",],
- optional: ["remoteId",],
+ optional: ["remoteId", "nackCount",],
-videoOnly: ["droppedFrames", "bitrateMean", "bitrateStdDev",
- "framerateMean", "framerateStdDev",],
+localVideoOnly: ["droppedFrames", "bitrateMean", "bitrateStdDev",
+ "framerateMean", "framerateStdDev", "framesEncoded", "firCount",
+ "pliCount",],
unimplemented: ["mediaTrackId", "transportId", "codecId",
- "framesEncoded", "firCount", "pliCount", "nackCount", "sliCount",
- "qpSum", "roundTripTime", "targetBitrate",],
+ "sliCount", "qpSum", "roundTripTime", "targetBitrate",],
},
"codec": { skip: true },
"peer-connection": { skip: true },
"data-channel": { skip: true },
"track": { skip: true },
"transport": { skip: true },
"candidate-pair": { skip : true },
"local-candidate": { skip: true },
@@ -131,16 +131,32 @@ var pedanticChecks = report => {
if (stat.remoteId) {
ok(report.has(stat.remoteId), "remoteId exists in report.");
is(report.get(stat.remoteId).ssrc, stat.ssrc,
"remote ssrc and local ssrc match.");
is(report.get(stat.remoteId).remoteId, stat.id,
"remote object has local object as it's own remote object.");
}
+ // nackCount
+ if (!stat.inner.isRemote) {
+ ok(stat.nackCount >= 0, stat.type + ".nackCount is sane.");
+ }
+
+ if (!stat.inner.isRemote && stat.inner.mediaType == "video") {
+ // firCount
+ ok(stat.firCount >= 0 && stat.firCount < 100,
+ stat.type + ".firCount is a sane number for a short test. value="
+ + stat.firCount);
+
+ // pliCount
+ ok(stat.pliCount >= 0 && stat.pliCount < 100,
+ stat.type + ".pliCount is a sane number for a short test. value="
+ + stat.pliCount);
+ }
}
if (stat.type == "inbound-rtp") {
//
// Required fields
//
// packetsReceived
@@ -189,28 +205,28 @@ var pedanticChecks = report => {
is(stat.mozRtt, undefined, stat.type
+ ".mozRtt is only set when isRemote is true");
}
//
// Local video only stats
//
if (stat.inner.isRemote || stat.inner.mediaType != "video") {
- expectations.videoOnly.forEach(field => {
+ expectations.localVideoOnly.forEach(field => {
if (stat.inner.isRemote) {
ok(stat[field] === undefined, stat.type + " does not have field "
+ field + " when isRemote is true");
} else { // mediaType != video
ok(stat[field] === undefined, stat.type + " does not have field "
+ field + " when mediaType is not 'video'");
}
});
} else {
- expectations.videoOnly.forEach(field => {
- ok(stat[field] !== undefined, stat.type + " has field " + field
+ expectations.localVideoOnly.forEach(field => {
+ ok(stat.inner[field] !== undefined, stat.type + " has field " + field
+ " when mediaType is video");
});
// discardedPackets
ok(stat.discardedPackets < 100, stat.type
+ ".discardedPackets is a sane number for a short test. value="
+ stat.discardedPackets);
// bitrateMean
@@ -267,29 +283,29 @@ var pedanticChecks = report => {
//
// Optional fields
//
//
// Local video only stats
//
if (stat.inner.isRemote || stat.inner.mediaType != "video") {
- expectations.videoOnly.forEach(field => {
+ expectations.localVideoOnly.forEach(field => {
if (stat.inner.isRemote) {
ok(stat[field] === undefined, stat.type + " does not have field "
+ field + " when isRemote is true");
} else { // mediaType != video
ok(stat[field] === undefined, stat.type + " does not have field "
+ field + " when mediaType is not 'video'");
}
});
} else {
- expectations.videoOnly.forEach(field => {
- ok(stat[field] !== undefined, stat.type + " has field " + field
- + " when mediaType is video");
+ expectations.localVideoOnly.forEach(field => {
+ ok(stat.inner[field] !== undefined, stat.type + " has field " + field
+ + " when mediaType is video and isRemote is false");
});
// bitrateMean
if (stat.bitrateMean !== undefined) {
// TODO: uncomment when Bug 1341533 lands
// ok(stat.bitrateMean >= 0 && stat.bitrateMean < 2 ** 25,
// stat.type + ".bitrateMean is sane. value="
// + stat.bitrateMean);
@@ -318,17 +334,21 @@ var pedanticChecks = report => {
// stat.type + ".framerateStdDev is sane. value="
// + stat.framerateStdDev);
}
// droppedFrames
ok(stat.droppedFrames >= 0,
stat.type + ".droppedFrames is not negative. value="
+ stat.droppedFrames);
- }
+
+ // framesEncoded
+ ok(stat.framesEncoded >= 0 && stat.framesEncoded < 100000, stat.type
+ + ".framesEncoded is a sane number for a short test. value="
+ + stat.framesEncoded);
}
//
// Ensure everything was tested
//
[...expectations.expected, ...expectations.optional].forEach(field => {
ok(Object.keys(tested).includes(field), stat.type + "." + field
+ " was tested.");
--- a/dom/media/webrtc/WebrtcGlobal.h
+++ b/dom/media/webrtc/WebrtcGlobal.h
@@ -376,26 +376,38 @@ struct ParamTraits<mozilla::dom::RTCOutb
typedef mozilla::dom::RTCOutboundRTPStreamStats paramType;
static void Write(Message* aMsg, const paramType& aParam)
{
WriteParam(aMsg, aParam.mBytesSent);
WriteParam(aMsg, aParam.mDroppedFrames);
WriteParam(aMsg, aParam.mPacketsSent);
WriteParam(aMsg, aParam.mTargetBitrate);
+ WriteParam(aMsg, aParam.mFramesEncoded);
+ // NOTE(review): mFirCount/mNackCount/mPliCount are declared on the shared
+ // RTCRTPStreamStats base dictionary and are also filled in for inbound
+ // stats, but they are only serialized here; the RTCInboundRTPStreamStats
+ // ParamTraits will drop them over IPC. Consider moving these into
+ // Write/ReadRTCRTPStreamStats instead — TODO confirm against the inbound
+ // specialization (outside this hunk).
+ WriteParam(aMsg, aParam.mFirCount);
+ WriteParam(aMsg, aParam.mNackCount);
+ WriteParam(aMsg, aParam.mPliCount);
WriteRTCRTPStreamStats(aMsg, aParam);
WriteRTCStats(aMsg, aParam);
}
static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult)
{
if (!ReadParam(aMsg, aIter, &(aResult->mBytesSent)) ||
!ReadParam(aMsg, aIter, &(aResult->mDroppedFrames)) ||
!ReadParam(aMsg, aIter, &(aResult->mPacketsSent)) ||
!ReadParam(aMsg, aIter, &(aResult->mTargetBitrate)) ||
+ !ReadParam(aMsg, aIter, &(aResult->mFramesEncoded)) ||
+ !ReadParam(aMsg, aIter, &(aResult->mFirCount)) ||
+ !ReadParam(aMsg, aIter, &(aResult->mNackCount)) ||
+ !ReadParam(aMsg, aIter, &(aResult->mPliCount)) ||
!ReadRTCRTPStreamStats(aMsg, aIter, aResult) ||
!ReadRTCStats(aMsg, aIter, aResult)) {
return false;
}
return true;
}
};
--- a/dom/webidl/RTCStatsReport.webidl
+++ b/dom/webidl/RTCStatsReport.webidl
@@ -29,43 +29,50 @@ dictionary RTCRTPStreamStats : RTCStats
DOMString ssrc;
DOMString mediaType;
DOMString remoteId;
boolean isRemote = false;
DOMString mediaTrackId;
DOMString transportId;
DOMString codecId;
- // Video encoder/decoder measurements (absent for rtcp)
+ // Video encoder/decoder measurements, not present in RTCP case
double bitrateMean;
double bitrateStdDev;
double framerateMean;
double framerateStdDev;
+
+ // Local only measurements, RTCP related but not communicated via RTCP. Not
+ // present in RTCP case.
+ unsigned long firCount;
+ unsigned long pliCount;
+ unsigned long nackCount;
};
dictionary RTCInboundRTPStreamStats : RTCRTPStreamStats {
unsigned long packetsReceived;
unsigned long long bytesReceived;
double jitter;
unsigned long packetsLost;
long mozAvSyncDelay;
long mozJitterBufferDelay;
long mozRtt;
- // Video decoder measurement (absent in rtcp case)
+ // Video decoder measurement, not present in RTCP case
unsigned long discardedPackets;
};
dictionary RTCOutboundRTPStreamStats : RTCRTPStreamStats {
unsigned long packetsSent;
unsigned long long bytesSent;
double targetBitrate; // config encoder bitrate target of this SSRC in bits/s
- // Video encoder measurement (absent in rtcp case)
+ // Video encoder measurements, not present in RTCP case
unsigned long droppedFrames;
+ unsigned long framesEncoded;
};
dictionary RTCMediaStreamTrackStats : RTCStats {
DOMString trackIdentifier; // track.id property
boolean remoteSource;
sequence<DOMString> ssrcIds;
// Stuff that makes sense for video
unsigned long frameWidth;
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -160,16 +160,22 @@ bool WebrtcAudioConduit::GetRemoteSSRC(u
bool WebrtcAudioConduit::SetLocalCNAME(const char* cname)
{
char temp[256];
strncpy(temp, cname, sizeof(temp) - 1);
temp[sizeof(temp) - 1] = 0;
return !mPtrRTP->SetRTCP_CNAME(mChannel, temp);
}
+bool WebrtcAudioConduit::GetPacketTypeStats(
+ webrtc::RtcpPacketTypeCounter* aPacketCounts)
+{
+ return !mPtrVoERTP_RTCP->GetRTCPPacketTypeCounters(mChannel, *aPacketCounts);
+}
+
bool WebrtcAudioConduit::GetAVStats(int32_t* jitterBufferDelayMs,
int32_t* playoutBufferDelayMs,
int32_t* avSyncOffsetMs) {
return !mPtrVoEVideoSync->GetDelayEstimate(mChannel,
jitterBufferDelayMs,
playoutBufferDelayMs,
avSyncOffsetMs);
}
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -196,21 +196,26 @@ public:
bool SetLocalSSRCs(const std::vector<unsigned int>& aSSRCs) override;
std::vector<unsigned int> GetLocalSSRCs() const override;
bool SetRemoteSSRC(unsigned int ssrc) override
{
return false;
}
bool GetRemoteSSRC(unsigned int* ssrc) override;
bool SetLocalCNAME(const char* cname) override;
+
+ bool
+ GetPacketTypeStats(webrtc::RtcpPacketTypeCounter* aPacketCounts) override;
+
bool GetVideoEncoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
- uint32_t* droppedFrames) override
+ uint32_t* droppedFrames,
+ uint32_t* framesEncoded) override
{
return false;
}
bool GetVideoDecoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* discardedPackets) override
--- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
@@ -246,21 +246,26 @@ public:
virtual bool GetRemoteSSRC(unsigned int* ssrc) = 0;
virtual bool SetRemoteSSRC(unsigned int ssrc) = 0;
virtual bool SetLocalCNAME(const char* cname) = 0;
/**
* Functions returning stats needed by w3c stats model.
*/
+
+ virtual bool
+ GetPacketTypeStats(webrtc::RtcpPacketTypeCounter* aPacketCounts) = 0;
+
virtual bool GetVideoEncoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
- uint32_t* droppedFrames) = 0;
+ uint32_t* droppedFrames,
+ uint32_t* framesEncoded) = 0;
virtual bool GetVideoDecoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* discardedPackets) = 0;
virtual bool GetAVStats(int32_t* jitterBufferDelayMs,
int32_t* playoutBufferDelayMs,
int32_t* avSyncOffsetMs) = 0;
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -115,22 +115,25 @@ WebrtcVideoConduit::SendStreamStatistics
void
WebrtcVideoConduit::SendStreamStatistics::Update(
const webrtc::VideoSendStream::Stats& aStats)
{
StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps);
if (!aStats.substreams.empty()) {
const webrtc::FrameCounts& fc =
aStats.substreams.begin()->second.frame_counts;
- CSFLogVerbose(logTag, "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
- __FUNCTION__, aStats.encode_frame_rate, aStats.media_bitrate_bps,
- (mSentFrames - (fc.key_frames + fc.delta_frames)) - mDroppedFrames);
- mDroppedFrames = mSentFrames - (fc.key_frames + fc.delta_frames);
+ mFramesEncoded = fc.key_frames + fc.delta_frames;
+ CSFLogVerbose(logTag,
+ "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
+ __FUNCTION__, aStats.encode_frame_rate,
+ aStats.media_bitrate_bps,
+ mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
+ mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
} else {
- CSFLogVerbose(logTag, "%s aStats.substreams is empty", __FUNCTION__);
+ CSFLogVerbose(logTag, "%s stats.substreams is empty", __FUNCTION__);
}
};
void
WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets(
uint32_t& aOutDiscPackets) const
{
aOutDiscPackets = mDiscardedPackets;
@@ -206,21 +209,31 @@ WebrtcVideoConduit::WebrtcVideoConduit(R
{
mRecvStreamConfig.renderer = this;
// Video Stats Callback
nsTimerCallbackFunc callback = [](nsITimer* aTimer, void* aClosure) {
CSFLogDebug(logTag, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
auto self = static_cast<WebrtcVideoConduit*>(aClosure);
MutexAutoLock lock(self->mCodecMutex);
+ MOZ_ASSERT(!self->mEngineTransmitting || !self->mEngineReceiving,
+ "Video conduit is not both receiving and transmitting");
if (self->mEngineTransmitting && self->mSendStream) {
- self->mSendStreamStats.Update(self->mSendStream->GetStats());
+ const auto& stats = self->mSendStream->GetStats();
+ self->mSendStreamStats.Update(stats);
+ if (stats.substreams.empty()) {
+ return;
+ }
+ self->mPacketCounts =
+ stats.substreams.begin()->second.rtcp_packet_type_counts;
}
if (self->mEngineReceiving && self->mRecvStream) {
- self->mRecvStreamStats.Update(self->mRecvStream->GetStats());
+ const auto& stats = self->mRecvStream->GetStats();
+ self->mRecvStreamStats.Update(stats);
+ self->mPacketCounts = stats.rtcp_packet_type_counts;
}
};
mVideoStatsTimer->InitWithFuncCallback(
callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP);
}
WebrtcVideoConduit::~WebrtcVideoConduit()
{
@@ -743,30 +756,46 @@ WebrtcVideoConduit::GetRemoteSSRC(unsign
const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats();
*ssrc = stats.ssrc;
}
return true;
}
bool
+WebrtcVideoConduit::GetPacketTypeStats(
+ webrtc::RtcpPacketTypeCounter* aPacketCounts)
+{
+ MutexAutoLock lock(mCodecMutex);
+ if ((!mEngineTransmitting || !mSendStream) // Not transmitting
+ && (!mEngineReceiving || !mRecvStream)) // And not receiving
+ {
+ return false;
+ }
+ *aPacketCounts = mPacketCounts;
+ return true;
+}
+
+bool
WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
- uint32_t* droppedFrames)
+ uint32_t* droppedFrames,
+ uint32_t* framesEncoded)
{
{
MutexAutoLock lock(mCodecMutex);
if (!mEngineTransmitting || !mSendStream) {
return false;
}
mSendStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
*bitrateMean, *bitrateStdDev);
mSendStreamStats.DroppedFrames(*droppedFrames);
+ *framesEncoded = mSendStreamStats.FramesEncoded();
return true;
}
}
bool
WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
@@ -1764,17 +1793,17 @@ WebrtcVideoConduit::SendVideoFrame(webrt
}
}
if (mSendStream) { // can happen before StartTransmitting()
mSendStream->Input()->IncomingCapturedFrame(frame);
}
}
- mSendStreamStats.SentFrame();
+ mSendStreamStats.FrameDeliveredToEncoder();
CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
return kMediaConduitNoError;
}
// Transport Layer Callbacks
MediaConduitErrorCode
WebrtcVideoConduit::DeliverPacket(const void* data, int len)
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h
@@ -292,25 +292,29 @@ public:
virtual ~WebrtcVideoConduit();
MediaConduitErrorCode InitMain();
virtual MediaConduitErrorCode Init();
virtual void Destroy();
std::vector<unsigned int> GetLocalSSRCs() const override;
bool SetLocalSSRCs(const std::vector<unsigned int> & ssrcs) override;
-
bool GetRemoteSSRC(unsigned int* ssrc) override;
bool SetRemoteSSRC(unsigned int ssrc) override;
bool SetLocalCNAME(const char* cname) override;
+
+ bool
+ GetPacketTypeStats(webrtc::RtcpPacketTypeCounter* aPacketCounts) override;
+
bool GetVideoEncoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
- uint32_t* droppedFrames) override;
+ uint32_t* droppedFrames,
+ uint32_t* framesEncoded) override;
bool GetVideoDecoderStats(double* framerateMean,
double* framerateStdDev,
double* bitrateMean,
double* bitrateStdDev,
uint32_t* discardedPackets) override;
bool GetAVStats(int32_t* jitterBufferDelayMs,
int32_t* playoutBufferDelayMs,
int32_t* avSyncOffsetMs) override;
@@ -355,26 +359,31 @@ private:
*/
class SendStreamStatistics : public StreamStatistics {
public:
/**
* Returns the calculate number of dropped frames
* @param aOutDroppedFrames: the number of dropped frames
*/
void DroppedFrames(uint32_t& aOutDroppedFrames) const;
+ /**
+ * Returns the number of frames that have been encoded so far
+ */
+ uint32_t FramesEncoded() const {
+ return mFramesEncoded;
+ }
void Update(const webrtc::VideoSendStream::Stats& aStats);
/**
* Call once for every frame delivered for encoding
*/
- void SentFrame() {
- ++mSentFrames;
- }
+ void FrameDeliveredToEncoder() { ++mFramesDeliveredToEncoder; }
private:
uint32_t mDroppedFrames = 0;
- mozilla::Atomic<int32_t> mSentFrames;
+ uint32_t mFramesEncoded = 0;
+ mozilla::Atomic<int32_t> mFramesDeliveredToEncoder;
};
/** Statistics for receiving streams
*/
class ReceiveStreamStatistics : public StreamStatistics {
public:
/**
* Returns the number of discarded packets
@@ -452,22 +461,24 @@ private:
// Engine state we are concerned with.
mozilla::Atomic<bool> mEngineTransmitting; // If true ==> Transmit Subsystem is up and running
mozilla::Atomic<bool> mEngineReceiving; // if true ==> Receive Subsystem up and running
int mCapId; // Capturer for this conduit
//Local database of currently applied receive codecs
nsTArray<UniquePtr<VideoCodecConfig>> mRecvCodecList;
- // protects mCurSendCodecConfig, mInReconfig,mVideoSend/RecvStreamStats, mSend/RecvStreams
+ // protects mCurSendCodecConfig, mInReconfig,mVideoSend/RecvStreamStats, mSend/RecvStreams, mPacketCounts
Mutex mCodecMutex;
nsAutoPtr<VideoCodecConfig> mCurSendCodecConfig;
bool mInReconfig;
SendStreamStatistics mSendStreamStats;
ReceiveStreamStatistics mRecvStreamStats;
+ webrtc::RtcpPacketTypeCounter mPacketCounts;
+
// Must call webrtc::Call::DestroyVideoReceive/SendStream to delete these:
webrtc::VideoReceiveStream* mRecvStream;
webrtc::VideoSendStream* mSendStream;
unsigned short mLastWidth;
unsigned short mLastHeight;
unsigned short mSendingWidth;
unsigned short mSendingHeight;
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -3654,33 +3654,47 @@ PeerConnectionImpl::ExecuteStatsQuery_s(
s.mSsrc.Construct(ssrc);
}
s.mMediaType.Construct(mediaType);
s.mRemoteId.Construct(remoteId);
s.mIsRemote = false;
s.mPacketsSent.Construct(mp.rtp_packets_sent());
s.mBytesSent.Construct(mp.rtp_bytes_sent());
+ // Fill in packet type statistics
+ webrtc::RtcpPacketTypeCounter counters;
+ if (mp.Conduit()->GetPacketTypeStats(&counters)) {
+ s.mNackCount.Construct(counters.nack_packets);
+ // Fill in video only packet type stats
+ if (!isAudio) {
+ s.mFirCount.Construct(counters.fir_packets);
+ s.mPliCount.Construct(counters.pli_packets);
+ }
+ }
+
// Lastly, fill in video encoder stats if this is video
if (!isAudio) {
double framerateMean;
double framerateStdDev;
double bitrateMean;
double bitrateStdDev;
uint32_t droppedFrames;
+ uint32_t framesEncoded;
if (mp.Conduit()->GetVideoEncoderStats(&framerateMean,
&framerateStdDev,
&bitrateMean,
&bitrateStdDev,
- &droppedFrames)) {
+ &droppedFrames,
+ &framesEncoded)) {
s.mFramerateMean.Construct(framerateMean);
s.mFramerateStdDev.Construct(framerateStdDev);
s.mBitrateMean.Construct(bitrateMean);
s.mBitrateStdDev.Construct(bitrateStdDev);
s.mDroppedFrames.Construct(droppedFrames);
+ s.mFramesEncoded.Construct(framesEncoded);
}
}
query->report->mOutboundRTPStreamStats.Value().AppendElement(s,
fallible);
}
break;
}
case MediaPipeline::RECEIVE: {
@@ -3742,16 +3756,26 @@ PeerConnectionImpl::ExecuteStatsQuery_s(
int32_t avSyncDelta;
if (mp.Conduit()->GetAVStats(&jitterBufferDelay,
&playoutBufferDelay,
&avSyncDelta)) {
s.mMozJitterBufferDelay.Construct(jitterBufferDelay);
s.mMozAvSyncDelay.Construct(avSyncDelta);
}
}
+ // Fill in packet type statistics
+ webrtc::RtcpPacketTypeCounter counters;
+ if (mp.Conduit()->GetPacketTypeStats(&counters)) {
+ s.mNackCount.Construct(counters.nack_packets);
+ // Fill in video only packet type stats
+ if (!isAudio) {
+ s.mFirCount.Construct(counters.fir_packets);
+ s.mPliCount.Construct(counters.pli_packets);
+ }
+ }
// Lastly, fill in video decoder stats if this is video
if (!isAudio) {
double framerateMean;
double framerateStdDev;
double bitrateMean;
double bitrateStdDev;
uint32_t discardedPackets;
if (mp.Conduit()->GetVideoDecoderStats(&framerateMean,
--- a/media/webrtc/trunk/webrtc/voice_engine/channel.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/channel.cc
@@ -150,17 +150,18 @@ class RtpPacketSenderProxy : public RtpP
struct ChannelStatistics : public RtcpStatistics {
ChannelStatistics() : rtcp(), max_jitter(0) {}
RtcpStatistics rtcp;
uint32_t max_jitter;
};
// Statistics callback, called at each generation of a new RTCP report block.
-class StatisticsProxy : public RtcpStatisticsCallback {
+class StatisticsProxy : public RtcpStatisticsCallback,
+ public RtcpPacketTypeCounterObserver {
public:
StatisticsProxy(uint32_t ssrc)
: stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
ssrc_(ssrc) {}
virtual ~StatisticsProxy() {}
void StatisticsUpdated(const RtcpStatistics& statistics,
uint32_t ssrc) override {
@@ -181,23 +182,38 @@ class StatisticsProxy : public RtcpStati
ssrc_ = ssrc;
}
ChannelStatistics GetStats() {
CriticalSectionScoped cs(stats_lock_.get());
return stats_;
}
+ void RtcpPacketTypesCounterUpdated(uint32_t ssrc,
+ const RtcpPacketTypeCounter& packet_counter) override {
+ CriticalSectionScoped cs(stats_lock_.get());
+ if (ssrc != ssrc_) {
+ return;
+ }
+ packet_counter_ = packet_counter;
+ }
+
+ void GetPacketTypeCounter(RtcpPacketTypeCounter& aPacketTypeCounter) {
+ CriticalSectionScoped cs(stats_lock_.get());
+ aPacketTypeCounter = packet_counter_;
+ }
+
private:
// StatisticsUpdated calls are triggered from threads in the RTP module,
// while GetStats calls can be triggered from the public voice engine API,
// hence synchronization is needed.
rtc::scoped_ptr<CriticalSectionWrapper> stats_lock_;
uint32_t ssrc_;
ChannelStatistics stats_;
+ RtcpPacketTypeCounter packet_counter_;
};
class VoERtcpObserver : public RtcpBandwidthObserver {
public:
explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
virtual ~VoERtcpObserver() {}
void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
@@ -931,17 +947,20 @@ Channel::Channel(int32_t channelId,
seq_num_allocator_proxy_.get();
configuration.transport_feedback_callback = feedback_observer_proxy_.get();
-  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
-  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
+ // CreateRtpRtcp() copies |configuration|, so the packet-type counter
+ // observer must be set before the module is created; assigning it
+ // afterwards would never take effect. The real SSRC is filled in below.
+ statistics_proxy_.reset(new StatisticsProxy(0));
+ configuration.rtcp_packet_type_counter_observer = statistics_proxy_.get();
+ _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
+ statistics_proxy_->SetSSRC(_rtpRtcpModule->SSRC());
rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
statistics_proxy_.get());
-
Config audioproc_config;
audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
audioproc_config.Set<ExtendedFilter>(
new ExtendedFilter(config.Get<ExtendedFilter>().enabled));
audioproc_config.Set<DelayAgnostic>(
new DelayAgnostic(config.Get<DelayAgnostic>().enabled));
rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
@@ -3269,16 +3285,24 @@ Channel::GetRTPStatistics(CallStatistics
// --- Timestamps
{
CriticalSectionScoped lock(ts_stats_lock_.get());
stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
}
return 0;
}
+int Channel::GetRTCPPacketTypeCounters(RtcpPacketTypeCounter& stats) {
+ if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
+ return -1;
+ }
+ statistics_proxy_->GetPacketTypeCounter(stats);
+ return 0;
+}
+
int Channel::SetREDStatus(bool enable, int redPayloadtype) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetREDStatus()");
if (enable) {
if (redPayloadtype < 0 || redPayloadtype > 127) {
_engineStatisticsPtr->SetLastError(
VE_PLTYPE_ERROR, kTraceError,
--- a/media/webrtc/trunk/webrtc/voice_engine/channel.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/channel.h
@@ -350,16 +350,17 @@ public:
int32_t& rttMs);
int SendApplicationDefinedRTCPPacket(unsigned char subType,
unsigned int name, const char* data,
unsigned short dataLengthInBytes);
int GetRTPStatistics(unsigned int& averageJitterMs,
unsigned int& maxJitterMs,
unsigned int& discardedPackets,
unsigned int& cumulativeLost);
+ int GetRTCPPacketTypeCounters(RtcpPacketTypeCounter& stats);
int GetRemoteRTCPReportBlocks(std::vector<ReportBlock>* report_blocks);
int GetRTPStatistics(CallStatistics& stats);
int SetREDStatus(bool enable, int redPayloadtype);
int GetREDStatus(bool& enabled, int& redPayloadtype);
int SetCodecFECStatus(bool enable);
bool GetCodecFECStatus();
void SetNACKStatus(bool enable, int maxNumberOfPackets);
--- a/media/webrtc/trunk/webrtc/voice_engine/include/voe_rtp_rtcp.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/include/voe_rtp_rtcp.h
@@ -174,16 +174,20 @@ class WEBRTC_DLLEXPORT VoERTP_RTCP {
unsigned int& averageJitterMs,
unsigned int& maxJitterMs,
unsigned int& discardedPackets,
unsigned int& cumulativeLost) = 0;
// Gets RTCP statistics for a specific |channel|.
virtual int GetRTCPStatistics(int channel, CallStatistics& stats) = 0;
+ // Gets RTCP packet type counters for a specific channel
+ virtual int GetRTCPPacketTypeCounters(int channel,
+ RtcpPacketTypeCounter& stats) = 0;
+
// Gets the report block parts of the last received RTCP Sender Report (SR),
// or RTCP Receiver Report (RR) on a specified |channel|. Each vector
// element also contains the SSRC of the sender in addition to a report
// block.
virtual int GetRemoteRTCPReportBlocks(
int channel,
std::vector<ReportBlock>* receive_blocks) = 0;
--- a/media/webrtc/trunk/webrtc/voice_engine/voe_rtp_rtcp_impl.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/voe_rtp_rtcp_impl.cc
@@ -352,16 +352,32 @@ int VoERTP_RTCPImpl::GetRTCPStatistics(i
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetRTPStatistics() failed to locate channel");
return -1;
}
return channelPtr->GetRTPStatistics(stats);
}
+int VoERTP_RTCPImpl::GetRTCPPacketTypeCounters(int channel,
+ RtcpPacketTypeCounter& stats) {
+ if (!_shared->statistics().Initialized()) {
+ _shared->SetLastError(VE_NOT_INITED, kTraceError);
+ return -1;
+ }
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL) {
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "GetRTCPPacketTypeCounters() failed to locate channel");
+ return -1;
+ }
+ return channelPtr->GetRTCPPacketTypeCounters(stats);
+}
+
int VoERTP_RTCPImpl::GetRemoteRTCPReportBlocks(
int channel, std::vector<ReportBlock>* report_blocks) {
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channel_ptr = ch.channel();
--- a/media/webrtc/trunk/webrtc/voice_engine/voe_rtp_rtcp_impl.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/voe_rtp_rtcp_impl.h
@@ -65,16 +65,19 @@ class VoERTP_RTCPImpl : public VoERTP_RT
int GetRTPStatistics(int channel,
unsigned int& averageJitterMs,
unsigned int& maxJitterMs,
unsigned int& discardedPackets,
unsigned int& cumulativeLost) override;
int GetRTCPStatistics(int channel, CallStatistics& stats) override;
+ int GetRTCPPacketTypeCounters(int channel,
+ RtcpPacketTypeCounter& stats) override;
+
int GetRemoteRTCPReportBlocks(
int channel,
std::vector<ReportBlock>* report_blocks) override;
// RED
int SetREDStatus(int channel, bool enable, int redPayloadtype = -1) override;
int GetREDStatus(int channel, bool& enabled, int& redPayloadtype) override;