--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -28,48 +28,52 @@
#include "webrtc/system_wrappers/include/clock.h"
#ifdef MOZ_WIDGET_ANDROID
#include "AndroidJNIWrapper.h"
#endif
namespace mozilla {
-static const char* logTag ="WebrtcAudioSessionConduit";
+static const char* acLogTag = "WebrtcAudioSessionConduit";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG acLogTag
// 32 bytes is what WebRTC CodecInst expects
const unsigned int WebrtcAudioConduit::CODEC_PLNAME_SIZE = 32;
/**
* Factory Method for AudioConduit
*/
RefPtr<AudioSessionConduit> AudioSessionConduit::Create()
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
WebrtcAudioConduit* obj = new WebrtcAudioConduit();
if(obj->Init() != kMediaConduitNoError)
{
- CSFLogError(logTag, "%s AudioConduit Init Failed ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s AudioConduit Init Failed ", __FUNCTION__);
delete obj;
return nullptr;
}
- CSFLogDebug(logTag, "%s Successfully created AudioConduit ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Successfully created AudioConduit ", __FUNCTION__);
return obj;
}
/**
* Destruction defines for our super-classes
*/
WebrtcAudioConduit::~WebrtcAudioConduit()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
for(auto & codec : mRecvCodecList)
{
delete codec;
}
// The first one of a pair to be deleted shuts down media for both
if(mPtrVoEXmedia)
{
@@ -242,21 +246,21 @@ bool WebrtcAudioConduit::GetRTCPSenderRe
senderInfo.NTPfraction);
*packetsSent = senderInfo.sendPacketCount;
*bytesSent = senderInfo.sendOctetCount;
}
return result;
}
bool WebrtcAudioConduit::SetDtmfPayloadType(unsigned char type, int freq) {
- CSFLogInfo(logTag, "%s : setting dtmf payload %d", __FUNCTION__, (int)type);
+ CSFLogInfo(LOGTAG, "%s : setting dtmf payload %d", __FUNCTION__, (int)type);
int result = mChannelProxy->SetSendTelephoneEventPayloadType(type, freq);
if (result == -1) {
- CSFLogError(logTag, "%s Failed call to SetSendTelephoneEventPayloadType(%u, %d)",
+ CSFLogError(LOGTAG, "%s Failed call to SetSendTelephoneEventPayloadType(%u, %d)",
__FUNCTION__, type, freq);
}
return result != -1;
}
bool WebrtcAudioConduit::InsertDTMFTone(int channel, int eventCode,
bool outOfBand, int lengthMs,
int attenuationDb) {
@@ -273,152 +277,152 @@ bool WebrtcAudioConduit::InsertDTMFTone(
return result != -1;
}
/*
* WebRTCAudioConduit Implementation
*/
MediaConduitErrorCode WebrtcAudioConduit::Init()
{
- CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);
#ifdef MOZ_WIDGET_ANDROID
jobject context = jsjni_GetGlobalContextRef();
// get the JVM
JavaVM *jvm = jsjni_GetVM();
if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
- CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif
// Per WebRTC APIs below function calls return nullptr on failure
if(!(mVoiceEngine = webrtc::VoiceEngine::Create()))
{
- CSFLogError(logTag, "%s Unable to create voice engine", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to create voice engine", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEBase = VoEBase::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to initialize VoEBase", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to initialize VoEBase", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
// init the engine with our audio device layer
if(mPtrVoEBase->Init() == -1)
{
- CSFLogError(logTag, "%s VoiceEngine Base Not Initialized", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s VoiceEngine Base Not Initialized", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoENetwork = VoENetwork::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to initialize VoENetwork", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to initialize VoENetwork", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoECodec = VoECodec::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to initialize VoEBCodec", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to initialize VoEBCodec", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEProcessing = VoEAudioProcessing::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to initialize VoEProcessing", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to initialize VoEProcessing", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEXmedia = VoEExternalMedia::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to initialize VoEExternalMedia", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to initialize VoEExternalMedia", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoERTP_RTCP = VoERTP_RTCP::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to initialize VoERTP_RTCP", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to initialize VoERTP_RTCP", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEVideoSync = VoEVideoSync::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to initialize VoEVideoSync", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Unable to initialize VoEVideoSync", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if (!(mPtrRTP = webrtc::VoERTP_RTCP::GetInterface(mVoiceEngine)))
{
- CSFLogError(logTag, "%s Unable to get audio RTP/RTCP interface ",
+ CSFLogError(LOGTAG, "%s Unable to get audio RTP/RTCP interface ",
__FUNCTION__);
return kMediaConduitSessionNotInited;
}
if( (mChannel = mPtrVoEBase->CreateChannel()) == -1)
{
- CSFLogError(logTag, "%s VoiceEngine Channel creation failed",__FUNCTION__);
+ CSFLogError(LOGTAG, "%s VoiceEngine Channel creation failed",__FUNCTION__);
return kMediaConduitChannelError;
}
// Needed to access TelephoneEvent APIs in 57 if we're not using Call/audio_send_stream/etc
webrtc::VoiceEngineImpl* s = static_cast<webrtc::VoiceEngineImpl*>(mVoiceEngine);
mChannelProxy = s->GetChannelProxy(mChannel);
MOZ_ASSERT(mChannelProxy);
- CSFLogDebug(logTag, "%s Channel Created %d ",__FUNCTION__, mChannel);
+ CSFLogDebug(LOGTAG, "%s Channel Created %d ",__FUNCTION__, mChannel);
if(mPtrVoENetwork->RegisterExternalTransport(mChannel, *this) == -1)
{
- CSFLogError(logTag, "%s VoiceEngine, External Transport Failed",__FUNCTION__);
+ CSFLogError(LOGTAG, "%s VoiceEngine, External Transport Failed",__FUNCTION__);
return kMediaConduitTransportRegistrationFail;
}
if(mPtrVoEXmedia->SetExternalRecordingStatus(true) == -1)
{
- CSFLogError(logTag, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__,
+ CSFLogError(LOGTAG, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalPlayoutError;
}
if(mPtrVoEXmedia->SetExternalPlayoutStatus(true) == -1)
{
- CSFLogError(logTag, "%s SetExternalPlayoutStatus Failed %d ",__FUNCTION__,
+ CSFLogError(LOGTAG, "%s SetExternalPlayoutStatus Failed %d ",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalRecordingError;
}
- CSFLogDebug(logTag , "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this);
return kMediaConduitNoError;
}
// AudioSessionConduit Implementation
MediaConduitErrorCode
WebrtcAudioConduit::SetTransmitterTransport(RefPtr<TransportInterface> aTransport)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
mTransmitterTransport = aTransport;
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
mReceiverTransport = aTransport;
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
MediaConduitErrorCode condError = kMediaConduitNoError;
int error = 0;//webrtc engine errors
webrtc::CodecInst cinst;
{
//validate codec param
if((condError = ValidateCodecConfig(codecConfig, true)) != kMediaConduitNoError)
{
@@ -428,50 +432,50 @@ WebrtcAudioConduit::ConfigureSendMediaCo
condError = StopTransmitting();
if (condError != kMediaConduitNoError) {
return condError;
}
if(!CodecConfigToWebRTCCodec(codecConfig,cinst))
{
- CSFLogError(logTag,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
+ CSFLogError(LOGTAG,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
return kMediaConduitMalformedArgument;
}
if(mPtrVoECodec->SetSendCodec(mChannel, cinst) == -1)
{
error = mPtrVoEBase->LastError();
- CSFLogError(logTag, "%s SetSendCodec - Invalid Codec %d ",__FUNCTION__,
+ CSFLogError(LOGTAG, "%s SetSendCodec - Invalid Codec %d ",__FUNCTION__,
error);
if(error == VE_CANNOT_SET_SEND_CODEC || error == VE_CODEC_ERROR)
{
- CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Invalid Send Codec", __FUNCTION__);
return kMediaConduitInvalidSendCodec;
}
- CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__,
+ CSFLogError(LOGTAG, "%s SetSendCodec Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
// This must be called after SetSendCodec
if (mPtrVoECodec->SetFECStatus(mChannel, codecConfig->mFECEnabled) == -1) {
- CSFLogError(logTag, "%s SetFECStatus Failed %d ", __FUNCTION__,
+ CSFLogError(LOGTAG, "%s SetFECStatus Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitFECStatusError;
}
mDtmfEnabled = codecConfig->mDtmfEnabled;
if (codecConfig->mName == "opus" && codecConfig->mMaxPlaybackRate) {
if (mPtrVoECodec->SetOpusMaxPlaybackRate(
mChannel,
codecConfig->mMaxPlaybackRate) == -1) {
- CSFLogError(logTag, "%s SetOpusMaxPlaybackRate Failed %d ", __FUNCTION__,
+ CSFLogError(LOGTAG, "%s SetOpusMaxPlaybackRate Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
}
// TEMPORARY - see bug 694814 comment 2
nsresult rv;
nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
@@ -502,31 +506,31 @@ WebrtcAudioConduit::ConfigureSendMediaCo
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::ConfigureRecvMediaCodecs(
const std::vector<AudioCodecConfig*>& codecConfigList)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
MediaConduitErrorCode condError = kMediaConduitNoError;
int error = 0; //webrtc engine errors
bool success = false;
// Are we receiving already? If so, stop receiving and playout
// since we can't apply new recv codec when the engine is playing.
condError = StopReceiving();
if (condError != kMediaConduitNoError) {
return condError;
}
if(codecConfigList.empty())
{
- CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Zero number of codecs to configure", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
// Try Applying the codecs in the list.
// We succeed if at least one codec was applied and reception was
// started successfully.
for(auto codec : codecConfigList)
{
@@ -534,105 +538,105 @@ WebrtcAudioConduit::ConfigureRecvMediaCo
if((condError = ValidateCodecConfig(codec,false)) != kMediaConduitNoError)
{
return condError;
}
webrtc::CodecInst cinst;
if(!CodecConfigToWebRTCCodec(codec,cinst))
{
- CSFLogError(logTag,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
+ CSFLogError(LOGTAG,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
continue;
}
if(mPtrVoECodec->SetRecPayloadType(mChannel,cinst) == -1)
{
error = mPtrVoEBase->LastError();
- CSFLogError(logTag, "%s SetRecvCodec Failed %d ",__FUNCTION__, error);
+ CSFLogError(LOGTAG, "%s SetRecvCodec Failed %d ",__FUNCTION__, error);
continue;
}
- CSFLogDebug(logTag, "%s Successfully Set RecvCodec %s", __FUNCTION__,
+ CSFLogDebug(LOGTAG, "%s Successfully Set RecvCodec %s", __FUNCTION__,
codec->mName.c_str());
//copy this to local database
if(!CopyCodecToDB(codec)) {
- CSFLogError(logTag,"%s Unable to updated Codec Database", __FUNCTION__);
+ CSFLogError(LOGTAG,"%s Unable to update Codec Database", __FUNCTION__);
return kMediaConduitUnknownError;
}
success = true;
} //end for
if(!success)
{
- CSFLogError(logTag, "%s Setting Receive Codec Failed ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Setting Receive Codec Failed ", __FUNCTION__);
return kMediaConduitInvalidReceiveCodec;
}
//If we are here, atleast one codec should have been set
condError = StartReceiving();
if (condError != kMediaConduitNoError) {
return condError;
}
DumpCodecDB();
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::EnableAudioLevelExtension(bool enabled, uint8_t id)
{
- CSFLogDebug(logTag, "%s %d %d ", __FUNCTION__, enabled, id);
+ CSFLogDebug(LOGTAG, "%s %d %d ", __FUNCTION__, enabled, id);
if (mPtrVoERTP_RTCP->SetSendAudioLevelIndicationStatus(mChannel, enabled, id) == -1)
{
- CSFLogError(logTag, "%s SetSendAudioLevelIndicationStatus Failed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s SetSendAudioLevelIndicationStatus Failed", __FUNCTION__);
return kMediaConduitUnknownError;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
int32_t lengthSamples, // per channel
int32_t samplingFreqHz,
uint32_t channels,
int32_t capture_delay)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
// Following checks need to be performed
// 1. Non null audio buffer pointer,
// 2. invalid sampling frequency - less than 0 or unsupported ones
// 3. Appropriate Sample Length for 10 ms audio-frame. This represents
// block size the VoiceEngine feeds into encoder for passed in audio-frame
// Ex: for 16000 sampling rate , valid block-length is 160
// Similarly for 32000 sampling rate, valid block length is 320
// We do the check by the verify modular operator below to be zero
if(!audio_data || (lengthSamples <= 0) ||
(IsSamplingFreqSupported(samplingFreqHz) == false) ||
((lengthSamples % (samplingFreqHz / 100) != 0)) )
{
- CSFLogError(logTag, "%s Invalid Parameters ",__FUNCTION__);
+ CSFLogError(LOGTAG, "%s Invalid Parameters ",__FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
//validate capture time
if(capture_delay < 0 )
{
- CSFLogError(logTag,"%s Invalid Capture Delay ", __FUNCTION__);
+ CSFLogError(LOGTAG,"%s Invalid Capture Delay ", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
// if transmission is not started .. conduit cannot insert frames
if(!mEngineTransmitting)
{
- CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
struct Processing insert = { TimeStamp::Now(), 0 };
mProcessing.AppendElement(insert);
}
@@ -649,61 +653,61 @@ WebrtcAudioConduit::SendAudioFrame(const
MediaConduitErrorCode
WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
int32_t samplingFreqHz,
int32_t capture_delay,
int& lengthSamples)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
unsigned int numSamples = 0;
//validate params
if(!speechData )
{
- CSFLogError(logTag,"%s Null Audio Buffer Pointer", __FUNCTION__);
+ CSFLogError(LOGTAG,"%s Null Audio Buffer Pointer", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
// Validate sample length
if((numSamples = GetNum10msSamplesForFrequency(samplingFreqHz)) == 0 )
{
- CSFLogError(logTag,"%s Invalid Sampling Frequency ", __FUNCTION__);
+ CSFLogError(LOGTAG,"%s Invalid Sampling Frequency ", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
//validate capture time
if(capture_delay < 0 )
{
- CSFLogError(logTag,"%s Invalid Capture Delay ", __FUNCTION__);
+ CSFLogError(LOGTAG,"%s Invalid Capture Delay ", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
//Conduit should have reception enabled before we ask for decoded
// samples
if(!mEngineReceiving)
{
- CSFLogError(logTag, "%s Engine not Receiving ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Engine not Receiving ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
lengthSamples = 0; //output paramter
if(mPtrVoEXmedia->ExternalPlayoutGetData( speechData,
samplingFreqHz,
capture_delay,
lengthSamples) == -1)
{
int error = mPtrVoEBase->LastError();
- CSFLogError(logTag, "%s Getting audio data Failed %d", __FUNCTION__, error);
+ CSFLogError(LOGTAG, "%s Getting audio data Failed %d", __FUNCTION__, error);
if(error == VE_RUNTIME_PLAY_ERROR)
{
return kMediaConduitPlayoutError;
}
return kMediaConduitUnknownError;
}
// Not #ifdef DEBUG or on a log module so we can use it for about:webrtc/etc
@@ -717,21 +721,21 @@ WebrtcAudioConduit::GetAudioFrame(int16_
&avsync_offset_ms)) {
if (avsync_offset_ms < 0) {
Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_VIDEO_LAGS_AUDIO_MS,
-avsync_offset_ms);
} else {
Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_AUDIO_LAGS_VIDEO_MS,
avsync_offset_ms);
}
- CSFLogError(logTag,
+ CSFLogError(LOGTAG,
"A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms",
avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms);
} else {
- CSFLogError(logTag, "A/V sync: GetAVStats failed");
+ CSFLogError(LOGTAG, "A/V sync: GetAVStats failed");
}
mLastSyncLog = mSamples;
}
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
if (mProcessing.Length() > 0) {
unsigned int now;
mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now);
@@ -748,82 +752,82 @@ WebrtcAudioConduit::GetAudioFrame(int16_
LogTime(AsyncLatencyLogger::AudioRecvRTP, ((uint64_t) this), delta);
break;
}
mProcessing.RemoveElementAt(0);
}
}
}
}
- CSFLogDebug(logTag,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
+ CSFLogDebug(LOGTAG,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
lengthSamples);
return kMediaConduitNoError;
}
// Transport Layer Callbacks
MediaConduitErrorCode
WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
{
- CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, mChannel);
+ CSFLogDebug(LOGTAG, "%s : channel %d", __FUNCTION__, mChannel);
if(mEngineReceiving)
{
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
// timestamp is at 32 bits in ([1])
struct Processing insert = { TimeStamp::Now(),
ntohl(static_cast<const uint32_t *>(data)[1]) };
mProcessing.AppendElement(insert);
}
// XXX we need to get passed the time the packet was received
if(mPtrVoENetwork->ReceivedRTPPacket(mChannel, data, len) == -1)
{
int error = mPtrVoEBase->LastError();
- CSFLogError(logTag, "%s RTP Processing Error %d", __FUNCTION__, error);
+ CSFLogError(LOGTAG, "%s RTP Processing Error %d", __FUNCTION__, error);
if(error == VE_RTP_RTCP_MODULE_ERROR)
{
return kMediaConduitRTPRTCPModuleError;
}
return kMediaConduitUnknownError;
}
} else {
- CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
+ CSFLogError(LOGTAG, "Error: %s when not receiving", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::ReceivedRTCPPacket(const void *data, int len)
{
- CSFLogDebug(logTag, "%s : channel %d",__FUNCTION__, mChannel);
+ CSFLogDebug(LOGTAG, "%s : channel %d",__FUNCTION__, mChannel);
if(mPtrVoENetwork->ReceivedRTCPPacket(mChannel, data, len) == -1)
{
int error = mPtrVoEBase->LastError();
- CSFLogError(logTag, "%s RTCP Processing Error %d", __FUNCTION__, error);
+ CSFLogError(LOGTAG, "%s RTCP Processing Error %d", __FUNCTION__, error);
if(error == VE_RTP_RTCP_MODULE_ERROR)
{
return kMediaConduitRTPRTCPModuleError;
}
return kMediaConduitUnknownError;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::StopTransmitting()
{
if(mEngineTransmitting)
{
- CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attempting to Stop ", __FUNCTION__);
if(mPtrVoEBase->StopSend(mChannel) == -1)
{
- CSFLogError(logTag, "%s StopSend() Failed %d ", __FUNCTION__,
+ CSFLogError(LOGTAG, "%s StopSend() Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
mEngineTransmitting = false;
}
return kMediaConduitNoError;
}
@@ -831,83 +835,83 @@ WebrtcAudioConduit::StopTransmitting()
MediaConduitErrorCode
WebrtcAudioConduit::StartTransmitting()
{
if (!mEngineTransmitting) {
//Let's Send Transport State-machine on the Engine
if(mPtrVoEBase->StartSend(mChannel) == -1)
{
int error = mPtrVoEBase->LastError();
- CSFLogError(logTag, "%s StartSend failed %d", __FUNCTION__, error);
+ CSFLogError(LOGTAG, "%s StartSend failed %d", __FUNCTION__, error);
return kMediaConduitUnknownError;
}
mEngineTransmitting = true;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::StopReceiving()
{
if(mEngineReceiving)
{
- CSFLogDebug(logTag, "%s Engine Already Receiving. Attemping to Stop ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Engine Already Receiving. Attempting to Stop ", __FUNCTION__);
// AudioEngine doesn't fail fatally on stopping reception. Ref:voe_errors.h.
// hence we need not be strict in failing here on errors
mPtrVoEBase->StopReceive(mChannel);
- CSFLogDebug(logTag, "%s Attemping to Stop playout ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Attempting to Stop playout ", __FUNCTION__);
if(mPtrVoEBase->StopPlayout(mChannel) == -1)
{
if( mPtrVoEBase->LastError() == VE_CANNOT_STOP_PLAYOUT)
{
- CSFLogDebug(logTag, "%s Stop-Playout Failed %d", __FUNCTION__, mPtrVoEBase->LastError());
+ CSFLogDebug(LOGTAG, "%s Stop-Playout Failed %d", __FUNCTION__, mPtrVoEBase->LastError());
return kMediaConduitPlayoutError;
}
}
mEngineReceiving = false;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcAudioConduit::StartReceiving()
{
if (!mEngineReceiving) {
if(mPtrVoEBase->StartReceive(mChannel) == -1)
{
int error = mPtrVoEBase->LastError();
- CSFLogError(logTag , "%s StartReceive Failed %d ",__FUNCTION__, error);
+ CSFLogError(LOGTAG, "%s StartReceive Failed %d ",__FUNCTION__, error);
if(error == VE_RECV_SOCKET_ERROR)
{
return kMediaConduitSocketError;
}
return kMediaConduitUnknownError;
}
if(mPtrVoEBase->StartPlayout(mChannel) == -1)
{
- CSFLogError(logTag, "%s Starting playout Failed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Starting playout Failed", __FUNCTION__);
return kMediaConduitPlayoutError;
}
mEngineReceiving = true;
}
return kMediaConduitNoError;
}
//WebRTC::RTP Callback Implementation
// Called on AudioGUM or MSG thread
bool
WebrtcAudioConduit::SendRtp(const uint8_t* data,
size_t len,
const webrtc::PacketOptions& options)
{
- CSFLogDebug(logTag, "%s: len %lu", __FUNCTION__, (unsigned long)len);
+ CSFLogDebug(LOGTAG, "%s: len %lu", __FUNCTION__, (unsigned long)len);
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
if (mProcessing.Length() > 0) {
TimeStamp started = mProcessing[0].mTimeStamp;
mProcessing.RemoveElementAt(0);
mProcessing.RemoveElementAt(0); // 20ms packetization! Could automate this by watching sizes
TimeDuration t = TimeStamp::Now() - started;
int64_t delta = t.ToMilliseconds();
@@ -918,65 +922,65 @@ WebrtcAudioConduit::SendRtp(const uint8_
// XXX(pkerr) - the PacketOptions are being ignored. This parameter was added along
// with the Call API update in the webrtc.org codebase.
// The only field in it is the packet_id, which is used when the header
// extension for TransportSequenceNumber is being used, which we don't.
(void)options;
if(mTransmitterTransport &&
(mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
{
- CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Sent RTP Packet ", __FUNCTION__);
return true;
}
- CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s RTP Packet Send Failed ", __FUNCTION__);
return false;
}
// Called on WebRTC Process thread and perhaps others
bool
WebrtcAudioConduit::SendRtcp(const uint8_t* data, size_t len)
{
- CSFLogDebug(logTag, "%s : len %lu, first rtcp = %u ",
+ CSFLogDebug(LOGTAG, "%s : len %lu, first rtcp = %u ",
__FUNCTION__,
(unsigned long) len,
static_cast<unsigned>(data[1]));
// We come here if we have only one pipeline/conduit setup,
// such as for unidirectional streams.
// We also end up here if we are receiving
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if(mReceiverTransport &&
mReceiverTransport->SendRtcpPacket(data, len) == NS_OK)
{
// Might be a sender report, might be a receiver report, we don't know.
- CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Sent RTCP Packet ", __FUNCTION__);
return true;
}
if (mTransmitterTransport &&
(mTransmitterTransport->SendRtcpPacket(data, len) == NS_OK)) {
- CSFLogDebug(logTag, "%s Sent RTCP Packet (sender report) ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Sent RTCP Packet (sender report) ", __FUNCTION__);
return true;
}
- CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s RTCP Packet Send Failed ", __FUNCTION__);
return false;
}
/**
* Converts between CodecConfig to WebRTC Codec Structure.
*/
bool
WebrtcAudioConduit::CodecConfigToWebRTCCodec(const AudioCodecConfig* codecInfo,
webrtc::CodecInst& cinst)
{
const unsigned int plNameLength = codecInfo->mName.length();
memset(&cinst, 0, sizeof(webrtc::CodecInst));
if(sizeof(cinst.plname) < plNameLength+1)
{
- CSFLogError(logTag, "%s Payload name buffer capacity mismatch ",
+ CSFLogError(LOGTAG, "%s Payload name buffer capacity mismatch ",
__FUNCTION__);
return false;
}
memcpy(cinst.plname, codecInfo->mName.c_str(), plNameLength);
cinst.plname[plNameLength]='\0';
cinst.pltype = codecInfo->mType;
cinst.rate = codecInfo->mRate;
cinst.pacsize = codecInfo->mPacSize;
@@ -1080,57 +1084,57 @@ WebrtcAudioConduit::CheckCodecForMatch(c
MediaConduitErrorCode
WebrtcAudioConduit::ValidateCodecConfig(const AudioCodecConfig* codecInfo,
bool send)
{
bool codecAppliedAlready = false;
if(!codecInfo)
{
- CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
if((codecInfo->mName.empty()) ||
(codecInfo->mName.length() >= CODEC_PLNAME_SIZE))
{
- CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
//Only mono or stereo channels supported
if( (codecInfo->mChannels != 1) && (codecInfo->mChannels != 2))
{
- CSFLogError(logTag, "%s Channel Unsupported ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Channel Unsupported ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
//check if we have the same codec already applied
if(send)
{
MutexAutoLock lock(mCodecMutex);
codecAppliedAlready = CheckCodecsForMatch(mCurSendCodecConfig,codecInfo);
} else {
codecAppliedAlready = CheckCodecForMatch(codecInfo);
}
if(codecAppliedAlready)
{
- CSFLogDebug(logTag, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str());
+ CSFLogDebug(LOGTAG, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str());
}
return kMediaConduitNoError;
}
void
WebrtcAudioConduit::DumpCodecDB() const
{
for(auto& codec : mRecvCodecList)
{
- CSFLogDebug(logTag,"Payload Name: %s", codec->mName.c_str());
- CSFLogDebug(logTag,"Payload Type: %d", codec->mType);
- CSFLogDebug(logTag,"Payload Frequency: %d", codec->mFreq);
- CSFLogDebug(logTag,"Payload PacketSize: %d", codec->mPacSize);
- CSFLogDebug(logTag,"Payload Channels: %d", codec->mChannels);
- CSFLogDebug(logTag,"Payload Sampling Rate: %d", codec->mRate);
+ CSFLogDebug(LOGTAG,"Payload Name: %s", codec->mName.c_str());
+ CSFLogDebug(LOGTAG,"Payload Type: %d", codec->mType);
+ CSFLogDebug(LOGTAG,"Payload Frequency: %d", codec->mFreq);
+ CSFLogDebug(LOGTAG,"Payload PacketSize: %d", codec->mPacSize);
+ CSFLogDebug(LOGTAG,"Payload Channels: %d", codec->mChannels);
+ CSFLogDebug(LOGTAG,"Payload Sampling Rate: %d", codec->mRate);
}
}
}// end namespace
--- a/media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.cpp
@@ -6,31 +6,35 @@
#include "nspr.h"
#include "WebrtcMediaCodecVP8VideoCodec.h"
#include "MediaCodecVideoCodec.h"
#include "MediaPrefs.h"
namespace mozilla {
-static const char* logTag ="MediaCodecVideoCodec";
+static const char* mcvcLogTag = "MediaCodecVideoCodec";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG mcvcLogTag
WebrtcVideoEncoder* MediaCodecVideoCodec::CreateEncoder(CodecType aCodecType) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (aCodecType == CODEC_VP8) {
if (MediaPrefs::RemoteMediaCodecVP8EncoderEnabled()) {
return new WebrtcMediaCodecVP8VideoRemoteEncoder();
} else {
return new WebrtcMediaCodecVP8VideoEncoder();
}
}
return nullptr;
}
WebrtcVideoDecoder* MediaCodecVideoCodec::CreateDecoder(CodecType aCodecType) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (aCodecType == CODEC_VP8) {
return new WebrtcMediaCodecVP8VideoDecoder();
}
return nullptr;
}
}
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -65,17 +65,21 @@
#include <math.h>
#include <cinttypes>
#define DEFAULT_VIDEO_MAX_FRAMERATE 30
#define INVALID_RTP_PAYLOAD 255 // valid payload types are 0 to 127
namespace mozilla {
-static const char* logTag = "WebrtcVideoSessionConduit";
+static const char* vcLogTag = "WebrtcVideoSessionConduit";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG vcLogTag
static const int kNullPayloadType = -1;
static const char* kUlpFecPayloadName = "ulpfec";
static const char* kRedPayloadName = "red";
// Convert (SI) kilobits/sec to (SI) bits/sec
#define KBPS(kbps) kbps * 1000
const uint32_t WebrtcVideoConduit::kDefaultMinBitrate_bps = KBPS(200);
@@ -166,24 +170,24 @@ void
WebrtcVideoConduit::SendStreamStatistics::Update(
const webrtc::VideoSendStream::Stats& aStats)
{
StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps);
if (!aStats.substreams.empty()) {
const webrtc::FrameCounts& fc =
aStats.substreams.begin()->second.frame_counts;
mFramesEncoded = fc.key_frames + fc.delta_frames;
- CSFLogVerbose(logTag,
+ CSFLogVerbose(LOGTAG,
"%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
__FUNCTION__, aStats.encode_frame_rate,
aStats.media_bitrate_bps,
mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
} else {
- CSFLogVerbose(logTag, "%s stats.substreams is empty", __FUNCTION__);
+ CSFLogVerbose(LOGTAG, "%s stats.substreams is empty", __FUNCTION__);
}
}
void
WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets(
uint32_t& aOutDiscPackets) const
{
aOutDiscPackets = mDiscardedPackets;
@@ -195,43 +199,43 @@ WebrtcVideoConduit::ReceiveStreamStatist
{
aFramesDecoded = mFramesDecoded;
}
void
WebrtcVideoConduit::ReceiveStreamStatistics::Update(
const webrtc::VideoReceiveStream::Stats& aStats)
{
- CSFLogVerbose(logTag, "%s ", __FUNCTION__);
+ CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps);
mDiscardedPackets = aStats.discarded_packets;
mFramesDecoded = aStats.frame_counts.key_frames
+ aStats.frame_counts.delta_frames;
}
/**
* Factory Method for VideoConduit
*/
RefPtr<VideoSessionConduit>
VideoSessionConduit::Create(RefPtr<WebRtcCallWrapper> aCall)
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
NS_ASSERTION(aCall, "missing required parameter: aCall");
- CSFLogVerbose(logTag, "%s", __FUNCTION__);
+ CSFLogVerbose(LOGTAG, "%s", __FUNCTION__);
if (!aCall) {
return nullptr;
}
nsAutoPtr<WebrtcVideoConduit> obj(new WebrtcVideoConduit(aCall));
if(obj->Init() != kMediaConduitNoError) {
- CSFLogError(logTag, "%s VideoConduit Init Failed ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__);
return nullptr;
}
- CSFLogVerbose(logTag, "%s Successfully created VideoConduit ", __FUNCTION__);
+ CSFLogVerbose(LOGTAG, "%s Successfully created VideoConduit ", __FUNCTION__);
return obj.forget();
}
WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall)
: mTransportMonitor("WebrtcVideoConduit")
, mRenderer(nullptr)
, mVideoAdapter(1)
, mVideoBroadcaster()
@@ -271,17 +275,17 @@ WebrtcVideoConduit::WebrtcVideoConduit(R
, mSendCodecPlugin(nullptr)
, mRecvCodecPlugin(nullptr)
, mVideoStatsTimer(do_CreateInstance(NS_TIMER_CONTRACTID))
{
mRecvStreamConfig.renderer = this;
// Video Stats Callback
nsTimerCallbackFunc callback = [](nsITimer* aTimer, void* aClosure) {
- CSFLogDebug(logTag, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
+ CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
auto self = static_cast<WebrtcVideoConduit*>(aClosure);
MutexAutoLock lock(self->mCodecMutex);
if (self->mEngineTransmitting && self->mSendStream) {
const auto& stats = self->mSendStream->GetStats();
self->mSendStreamStats.Update(stats);
if (!stats.substreams.empty()) {
self->mSendPacketCounts =
stats.substreams.begin()->second.rtcp_packet_type_counts;
@@ -295,22 +299,22 @@ WebrtcVideoConduit::WebrtcVideoConduit(R
};
mVideoStatsTimer->InitWithNamedFuncCallback(
callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP,
"WebrtcVideoConduit::WebrtcVideoConduit");
}
WebrtcVideoConduit::~WebrtcVideoConduit()
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
if (mVideoStatsTimer) {
- CSFLogDebug(logTag, "canceling StreamStats for VideoConduit: %p", this);
+ CSFLogDebug(LOGTAG, "canceling StreamStats for VideoConduit: %p", this);
MutexAutoLock lock(mCodecMutex);
- CSFLogDebug(logTag, "StreamStats cancelled for VideoConduit: %p", this);
+ CSFLogDebug(LOGTAG, "StreamStats cancelled for VideoConduit: %p", this);
mVideoStatsTimer->Cancel();
}
// Release AudioConduit first by dropping reference on MainThread, where it expects to be
SyncTo(nullptr);
Destroy();
}
@@ -368,17 +372,17 @@ WebrtcVideoConduit::SetLocalCNAME(const
{
mSendStreamConfig.rtp.c_name = cname;
return true;
}
MediaConduitErrorCode
WebrtcVideoConduit::ConfigureCodecMode(webrtc::VideoCodecMode mode)
{
- CSFLogVerbose(logTag, "%s ", __FUNCTION__);
+ CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
if (mode == webrtc::VideoCodecMode::kRealtimeVideo ||
mode == webrtc::VideoCodecMode::kScreensharing) {
mCodecMode = mode;
return kMediaConduitNoError;
}
return kMediaConduitMalformedArgument;
}
@@ -466,28 +470,28 @@ WebrtcVideoConduit::CreateRecvStream()
std::unique_ptr<webrtc::VideoDecoder> decoder;
webrtc::VideoCodecType decoder_type;
mRecvStreamConfig.decoders.clear();
for (auto& config : mRecvCodecList) {
decoder_type = SupportedCodecType(webrtc::PayloadNameToCodecType(config->mName)
.value_or(webrtc::VideoCodecType::kVideoCodecUnknown));
if (decoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
- CSFLogError(logTag, "%s Unknown decoder type: %s", __FUNCTION__,
+ CSFLogError(LOGTAG, "%s Unknown decoder type: %s", __FUNCTION__,
config->mName.c_str());
continue;
}
decoder.reset(CreateDecoder(decoder_type));
if (!decoder) {
// This really should never happen unless something went wrong
// in the negotiation code
NS_ASSERTION(decoder, "Failed to create video decoder");
- CSFLogError(logTag, "Failed to create decoder of type %s (%d)",
+ CSFLogError(LOGTAG, "Failed to create decoder of type %s (%d)",
config->mName.c_str(), decoder_type);
// don't stop
continue;
}
decoder_desc.decoder = decoder.get();
mDecoders.push_back(std::move(decoder));
decoder_desc.payload_name = config->mName;
@@ -497,17 +501,17 @@ WebrtcVideoConduit::CreateRecvStream()
mRecvStreamConfig.decoders.push_back(decoder_desc);
}
mRecvStream = mCall->Call()->CreateVideoReceiveStream(mRecvStreamConfig.Copy());
if (!mRecvStream) {
mDecoders.clear();
return kMediaConduitUnknownError;
}
- CSFLogDebug(logTag, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
+ CSFLogDebug(LOGTAG, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
mRecvStream, mRecvStreamConfig.rtp.remote_ssrc, mRecvStreamConfig.rtp.remote_ssrc);
return kMediaConduitNoError;
}
static rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig,
const WebrtcVideoConduit* aConduit)
@@ -584,22 +588,22 @@ WebrtcVideoConduit::VideoStreamFactory::
#if 0
// XXX What we'd like to do for each simulcast stream...
if (simulcastEncoding.constraints.scaleDownBy > 1.0) {
uint32_t new_width = width / simulcastEncoding.constraints.scaleDownBy;
uint32_t new_height = height / simulcastEncoding.constraints.scaleDownBy;
if (new_width != width || new_height != height) {
if (streamCount == 1) {
- CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatio", __FUNCTION__);
+ CSFLogVerbose(LOGTAG, "%s: ConstrainPreservingAspectRatio", __FUNCTION__);
// Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
ConstrainPreservingAspectRatio(new_width, new_height,
&width, &height);
} else {
- CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatioExact", __FUNCTION__);
+ CSFLogVerbose(LOGTAG, "%s: ConstrainPreservingAspectRatioExact", __FUNCTION__);
// webrtc.org supposedly won't tolerate simulcast unless every stream
// is exactly the same aspect ratio. 320x240 / 3 = 80x60.
ConstrainPreservingAspectRatioExact(new_width * new_height,
&width, &height);
}
}
}
#endif
@@ -663,17 +667,17 @@ WebrtcVideoConduit::VideoStreamFactory::
}
video_stream.max_qp = kQpMax;
video_stream.SetRid(simulcastEncoding.rid);
if (mConduit->mCurSendCodecConfig->mName == "H264") {
if (mConduit->mCurSendCodecConfig->mEncodingConstraints.maxMbps > 0) {
// Not supported yet!
- CSFLogError(logTag, "%s H.264 max_mbps not supported yet", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s H.264 max_mbps not supported yet", __FUNCTION__);
}
}
streams.push_back(video_stream);
}
return streams;
}
/**
@@ -685,29 +689,29 @@ WebrtcVideoConduit::VideoStreamFactory::
* renegotiation/reconfiguration, this now needs a lock! Alternatively
* changes could be queued until the next frame is delivered using an
* Atomic pointer and swaps.
*/
MediaConduitErrorCode
WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
{
- CSFLogDebug(logTag, "%s for %s", __FUNCTION__,
+ CSFLogDebug(LOGTAG, "%s for %s", __FUNCTION__,
codecConfig ? codecConfig->mName.c_str() : "<null>");
MediaConduitErrorCode condError = kMediaConduitNoError;
// validate basic params
if ((condError = ValidateCodecConfig(codecConfig)) != kMediaConduitNoError) {
return condError;
}
size_t streamCount = std::min(codecConfig->mSimulcastEncodings.size(),
(size_t)webrtc::kMaxSimulcastStreams);
- CSFLogDebug(logTag, "%s for VideoConduit:%p stream count:%d", __FUNCTION__,
+ CSFLogDebug(LOGTAG, "%s for VideoConduit:%p stream count:%d", __FUNCTION__,
this, static_cast<int>(streamCount));
mSendingFramerate = 0;
mEncoderConfig.ClearStreams();
mSendStreamConfig.rtp.rids.clear();
int max_framerate;
if (codecConfig->mEncodingConstraints.maxFps > 0) {
@@ -845,17 +849,17 @@ WebrtcVideoConduit::ConfigureSendMediaCo
}
return condError;
}
bool
WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc)
{
- CSFLogDebug(logTag, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
+ CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
mRecvStreamConfig.rtp.remote_ssrc = ssrc;
unsigned int current_ssrc;
if (!GetRemoteSSRC(&current_ssrc)) {
return false;
}
if (current_ssrc == ssrc) {
@@ -876,17 +880,17 @@ WebrtcVideoConduit::SetRemoteSSRC(unsign
// On the next StartReceiving() or ConfigureRecvMediaCodec, force
// building a new RecvStream to switch SSRCs.
DeleteRecvStream();
if (!wasReceiving) {
return true;
}
MediaConduitErrorCode rval = CreateRecvStream();
if (rval != kMediaConduitNoError) {
- CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
+ CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
return false;
}
}
return (StartReceiving() == kMediaConduitNoError);
}
bool
WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc)
@@ -977,17 +981,17 @@ WebrtcVideoConduit::GetAVStats(int32_t*
{
return false;
}
bool
WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
unsigned int* cumulativeLost)
{
- CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
+ CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
{
MutexAutoLock lock(mCodecMutex);
if (!mRecvStream) {
return false;
}
const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats();
*jitterMs =
@@ -1000,44 +1004,44 @@ WebrtcVideoConduit::GetRTPStats(unsigned
bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
uint32_t* jitterMs,
uint32_t* packetsReceived,
uint64_t* bytesReceived,
uint32_t* cumulativeLost,
int32_t* rttMs)
{
{
- CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
+ CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
MutexAutoLock lock(mCodecMutex);
if (!mSendStream) {
return false;
}
const webrtc::VideoSendStream::Stats& sendStats = mSendStream->GetStats();
if (sendStats.substreams.empty()
|| mSendStreamConfig.rtp.ssrcs.empty()) {
return false;
}
uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front();
auto ind = sendStats.substreams.find(ssrc);
if (ind == sendStats.substreams.end()) {
- CSFLogError(logTag,
+ CSFLogError(LOGTAG,
"%s for VideoConduit:%p ssrc not found in SendStream stats.",
__FUNCTION__, this);
return false;
}
*jitterMs = ind->second.rtcp_stats.jitter
/ (webrtc::kVideoPayloadTypeFrequency / 1000);
*cumulativeLost = ind->second.rtcp_stats.cumulative_lost;
*bytesReceived = ind->second.rtp_stats.MediaPayloadBytes();
*packetsReceived = ind->second.rtp_stats.transmitted.packets;
auto stats = mCall->Call()->GetStats();
int64_t rtt = stats.rtt_ms;
#ifdef DEBUG
if (rtt > INT32_MAX) {
- CSFLogError(logTag,
+ CSFLogError(LOGTAG,
"%s for VideoConduit:%p RTT is larger than the"
" maximum size of an RTCP RTT.", __FUNCTION__, this);
}
#endif
if (rtt > 0) {
*rttMs = rtt;
} else {
*rttMs = 0;
@@ -1049,17 +1053,17 @@ bool WebrtcVideoConduit::GetRTCPReceiver
return true;
}
bool
WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
unsigned int* packetsSent,
uint64_t* bytesSent)
{
- CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
+ CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
webrtc::RTCPSenderInfo senderInfo;
{
MutexAutoLock lock(mCodecMutex);
if (!mRecvStream || !mRecvStream->GetRemoteRTCPSenderInfo(&senderInfo)) {
return false;
}
}
*timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
@@ -1144,39 +1148,39 @@ WebrtcVideoConduit::InitMain()
"media.peerconnection.video.lock_scaling", &mLockScaling)));
}
}
#ifdef MOZ_WIDGET_ANDROID
// get the JVM
JavaVM *jvm = jsjni_GetVM();
if (mozilla::camera::VideoEngine::SetAndroidObjects(jvm) != 0) {
- CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: could not set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif //MOZ_WIDGET_ANDROID
return kMediaConduitNoError;
}
/**
* Performs initialization of the MANDATORY components of the Video Engine
*/
MediaConduitErrorCode
WebrtcVideoConduit::Init()
{
- CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);
MediaConduitErrorCode result;
// Run code that must run on MainThread first
MOZ_ASSERT(NS_IsMainThread());
result = InitMain();
if (result != kMediaConduitNoError) {
return result;
}
- CSFLogError(logTag, "%s Initialization Done", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Initialization Done", __FUNCTION__);
return kMediaConduitNoError;
}
void
WebrtcVideoConduit::Destroy()
{
// We can't delete the VideoEngine until all these are released!
// And we can't use a Scoped ptr, since the order is arbitrary
@@ -1184,22 +1188,22 @@ WebrtcVideoConduit::Destroy()
MutexAutoLock lock(mCodecMutex);
DeleteSendStream();
DeleteRecvStream();
}
void
WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit)
{
- CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
+ CSFLogDebug(LOGTAG, "%s Synced to %p", __FUNCTION__, aConduit);
{
MutexAutoLock lock(mCodecMutex);
if (!mRecvStream) {
- CSFLogError(logTag, "SyncTo called with no receive stream");
+ CSFLogError(LOGTAG, "SyncTo called with no receive stream");
return;
}
if (aConduit) {
mRecvStream->SetSyncChannel(aConduit->GetVoiceEngine(),
aConduit->GetChannel());
} else if (mSyncedTo) {
mRecvStream->SetSyncChannel(mSyncedTo->GetVoiceEngine(), -1);
@@ -1207,21 +1211,21 @@ WebrtcVideoConduit::SyncTo(WebrtcAudioCo
}
mSyncedTo = aConduit;
}
MediaConduitErrorCode
WebrtcVideoConduit::AttachRenderer(RefPtr<mozilla::VideoRenderer> aVideoRenderer)
{
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
// null renderer
if (!aVideoRenderer) {
- CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s NULL Renderer", __FUNCTION__);
MOZ_ASSERT(false);
return kMediaConduitInvalidRenderer;
}
// This function is called only from main, so we only need to protect against
// modifying mRenderer while any webrtc.org code is trying to use it.
{
ReentrantMonitorAutoEnter enter(mTransportMonitor);
@@ -1245,45 +1249,45 @@ WebrtcVideoConduit::DetachRenderer()
}
}
}
MediaConduitErrorCode
WebrtcVideoConduit::SetTransmitterTransport(
RefPtr<TransportInterface> aTransport)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
mTransmitterTransport = aTransport;
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcVideoConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
mReceiverTransport = aTransport;
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcVideoConduit::ConfigureRecvMediaCodecs(
const std::vector<VideoCodecConfig* >& codecConfigList)
{
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
MediaConduitErrorCode condError = kMediaConduitNoError;
std::string payloadName;
if (codecConfigList.empty()) {
- CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Zero number of codecs to configure", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
webrtc::KeyFrameRequestMethod kf_request_method = webrtc::kKeyFrameReqPliRtcp;
bool kf_request_enabled = false;
bool use_nack_basic = false;
bool use_tmmbr = false;
bool use_remb = false;
@@ -1295,17 +1299,17 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
// Try Applying the codecs in the list
// we treat as success if at least one codec was applied and reception was
// started successfully.
std::set<unsigned int> codec_types_seen;
for (const auto& codec_config : codecConfigList) {
if ((condError = ValidateCodecConfig(codec_config))
!= kMediaConduitNoError) {
- CSFLogError(logTag, "%s Invalid config for %s decoder: %i", __FUNCTION__,
+ CSFLogError(LOGTAG, "%s Invalid config for %s decoder: %i", __FUNCTION__,
codec_config ? codec_config->mName.c_str() : "<null>",
condError);
continue;
}
if (codec_config->mName == "H264") {
// TODO(bug 1200768): We can only handle configuring one recv H264 codec
if (configuredH264) {
continue;
@@ -1342,17 +1346,17 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
use_tmmbr |= codec_config->RtcpFbCcmIsSet("tmmbr");
use_remb |= codec_config->RtcpFbRembIsSet();
use_fec |= codec_config->RtcpFbFECIsSet();
recv_codecs.AppendElement(new VideoCodecConfig(*codec_config));
}
if (!recv_codecs.Length()) {
- CSFLogError(logTag, "%s Found no valid receive codecs", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Found no valid receive codecs", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
// Now decide if we need to recreate the receive stream, or can keep it
if (!mRecvStream ||
CodecsDifferent(recv_codecs, mRecvCodecList) ||
mRecvStreamConfig.rtp.nack.rtp_history_ms != (use_nack_basic ? 1000 : 0) ||
mRecvStreamConfig.rtp.remb != use_remb ||
@@ -1415,33 +1419,33 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc), sizeof(ssrc));
if (rv != SECSuccess) {
return kMediaConduitUnknownError;
}
}
// webrtc.org code has fits if you select an SSRC of 0
mRecvStreamConfig.rtp.local_ssrc = ssrc;
- CSFLogDebug(logTag, "%s (%p): Local SSRC 0x%08x (of %u), remote SSRC 0x%08x",
+ CSFLogDebug(LOGTAG, "%s (%p): Local SSRC 0x%08x (of %u), remote SSRC 0x%08x",
__FUNCTION__, (void*) this, ssrc,
(uint32_t) mSendStreamConfig.rtp.ssrcs.size(),
mRecvStreamConfig.rtp.remote_ssrc);
// XXX Copy over those that are the same and don't rebuild them
mRecvCodecList.SwapElements(recv_codecs);
recv_codecs.Clear();
mRecvStreamConfig.rtp.rtx.clear();
{
MutexAutoLock lock(mCodecMutex);
DeleteRecvStream();
// Rebuilds mRecvStream from mRecvStreamConfig
MediaConduitErrorCode rval = CreateRecvStream();
if (rval != kMediaConduitNoError) {
- CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
+ CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
return rval;
}
}
return StartReceiving();
}
return kMediaConduitNoError;
}
@@ -1703,32 +1707,32 @@ WebrtcVideoConduit::SelectSendResolution
}
}
// Adapt to getUserMedia resolution changes
// check if we need to reconfigure the sending resolution.
// NOTE: mSendingWidth != mLastWidth, because of maxwidth/height/etc above
bool changed = false;
if (mSendingWidth != width || mSendingHeight != height) {
- CSFLogDebug(logTag, "%s: resolution changing to %ux%u (from %ux%u)",
+ CSFLogDebug(LOGTAG, "%s: resolution changing to %ux%u (from %ux%u)",
__FUNCTION__, width, height, mSendingWidth, mSendingHeight);
// This will avoid us continually retrying this operation if it fails.
// If the resolution changes, we'll try again. In the meantime, we'll
// keep using the old size in the encoder.
mSendingWidth = width;
mSendingHeight = height;
changed = true;
}
unsigned int framerate = SelectSendFrameRate(mCurSendCodecConfig,
mSendingFramerate,
mSendingWidth,
mSendingHeight);
if (mSendingFramerate != framerate) {
- CSFLogDebug(logTag, "%s: framerate changing to %u (from %u)",
+ CSFLogDebug(LOGTAG, "%s: framerate changing to %u (from %u)",
__FUNCTION__, framerate, mSendingFramerate);
mSendingFramerate = framerate;
changed = true;
}
if (changed) {
// On a resolution change, bounce this to the correct thread to
// re-configure (same as used for Init(). Do *not* block the calling
@@ -1754,17 +1758,17 @@ WebrtcVideoConduit::SelectSendResolution
RefPtr<Runnable> webrtc_runnable =
media::NewRunnableFrom([self, width, height, new_frame]() -> nsresult {
UniquePtr<webrtc::VideoFrame> local_frame(new_frame); // Simplify cleanup
MutexAutoLock lock(self->mCodecMutex);
return self->ReconfigureSendCodec(width, height, new_frame);
});
// new_frame now owned by lambda
- CSFLogDebug(logTag, "%s: proxying lambda to WebRTC thread for reconfig (width %u/%u, height %u/%u",
+ CSFLogDebug(LOGTAG, "%s: proxying lambda to WebRTC thread for reconfig (width %u/%u, height %u/%u",
__FUNCTION__, width, mLastWidth, height, mLastHeight);
NS_DispatchToMainThread(webrtc_runnable.forget());
if (new_frame) {
return true; // queued it
}
} else {
// already on the right thread
ReconfigureSendCodec(width, height, frame);
@@ -1783,17 +1787,17 @@ WebrtcVideoConduit::ReconfigureSendCodec
// Test in case the stream hasn't started yet! We could get a frame in
// before we get around to StartTransmitting(), and that would dispatch a
// runnable to call this.
mInReconfig = false;
if (mSendStream) {
mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig());
if (frame) {
mVideoBroadcaster.OnFrame(*frame);
- CSFLogDebug(logTag, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
}
}
return NS_OK;
}
unsigned int
WebrtcVideoConduit::SelectSendFrameRate(const VideoCodecConfig* codecConfig,
unsigned int old_framerate,
@@ -1825,25 +1829,25 @@ WebrtcVideoConduit::SendVideoFrame(unsig
unsigned int video_length,
unsigned short width,
unsigned short height,
VideoType video_type,
uint64_t capture_time)
{
// check for parameter sanity
if (!video_buffer || video_length == 0 || width == 0 || height == 0) {
- CSFLogError(logTag, "%s Invalid Parameters ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Invalid Parameters ", __FUNCTION__);
MOZ_ASSERT(false);
return kMediaConduitMalformedArgument;
}
MOZ_ASSERT(video_type == VideoType::kVideoI420);
// Transmission should be enabled before we insert any frames.
if (!mEngineTransmitting) {
- CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
// insert the frame to video engine in I420 format only
const int stride_y = width;
const int stride_uv = (width + 1) / 2;
const uint8_t* buffer_y = video_buffer;
@@ -1864,17 +1868,17 @@ WebrtcVideoConduit::SendVideoFrame(unsig
return SendVideoFrame(video_frame);
}
void
WebrtcVideoConduit::AddOrUpdateSink(
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants)
{
- CSFLogDebug(logTag, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
+ CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(),
wants.max_pixel_count ? *wants.max_pixel_count : -1,
wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1);
// MUST run on the same thread as first call (MainThread)
if (!NS_IsMainThread()) {
// This can be asynchronous
RefPtr<WebrtcVideoConduit> self(this);
@@ -1929,28 +1933,28 @@ WebrtcVideoConduit::OnSinkWantsChanged(
MediaConduitErrorCode
WebrtcVideoConduit::SendVideoFrame(webrtc::VideoFrame& frame)
{
// XXX Google uses a "timestamp_aligner" to translate timestamps from the
// camera via TranslateTimestamp(); we should look at doing the same. This
// avoids sampling error when capturing frames, but google had to deal with some
// broken cameras, include Logitech c920's IIRC.
- CSFLogVerbose(logTag, "%s (send SSRC %u (0x%x))", __FUNCTION__,
+ CSFLogVerbose(LOGTAG, "%s (send SSRC %u (0x%x))", __FUNCTION__,
mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front());
// See if we need to recalculate what we're sending.
// Don't compute mSendingWidth/Height, since those may not be the same as the input.
{
MutexAutoLock lock(mCodecMutex);
if (mInReconfig) {
// Waiting for it to finish
return kMediaConduitNoError;
}
if (frame.width() != mLastWidth || frame.height() != mLastHeight) {
- CSFLogVerbose(logTag, "%s: call SelectSendResolution with %ux%u",
+ CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
__FUNCTION__, frame.width(), frame.height());
if (SelectSendResolution(frame.width(), frame.height(), &frame)) {
// SelectSendResolution took ownership of the data in i420_frame.
// Submit the frame after reconfig is done
return kMediaConduitNoError;
}
}
// adapt input video to wants of sink
@@ -2030,28 +2034,28 @@ WebrtcVideoConduit::SendVideoFrame(webrt
// Transport Layer Callbacks
MediaConduitErrorCode
WebrtcVideoConduit::DeliverPacket(const void* data, int len)
{
// Media Engine should be receiving already.
if (!mCall) {
- CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
+ CSFLogError(LOGTAG, "Error: %s when not receiving", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
// XXX we need to get passed the time the packet was received
webrtc::PacketReceiver::DeliveryStatus status =
mCall->Call()->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
static_cast<const uint8_t*>(data),
len, webrtc::PacketTime());
if (status != webrtc::PacketReceiver::DELIVERY_OK) {
- CSFLogError(logTag, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
+ CSFLogError(LOGTAG, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
return kMediaConduitRTPProcessingFailed;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
@@ -2064,29 +2068,29 @@ WebrtcVideoConduit::ReceivedRTPPacket(co
if (queue || mRecvSSRC != ssrc) {
// capture packet for insertion after ssrc is set -- do this before
// sending the runnable, since it may pull from this. Since it
// dispatches back to us, it's less critial to do this here, but doesn't
// hurt.
UniquePtr<QueuedPacket> packet((QueuedPacket*) malloc(sizeof(QueuedPacket) + len-1));
packet->mLen = len;
memcpy(packet->mData, data, len);
- CSFLogDebug(logTag, "queuing packet: seq# %u, Len %d ",
+ CSFLogDebug(LOGTAG, "queuing packet: seq# %u, Len %d ",
(uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
if (queue) {
mQueuedPackets.AppendElement(Move(packet));
return kMediaConduitNoError;
}
// a new switch needs to be done
// any queued packets are from a previous switch that hasn't completed
// yet; drop them and only process the latest SSRC
mQueuedPackets.Clear();
mQueuedPackets.AppendElement(Move(packet));
- CSFLogDebug(logTag, "%s: switching from SSRC %u to %u", __FUNCTION__,
+ CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__,
mRecvSSRC, ssrc);
// we "switch" here immediately, but buffer until the queue is released
mRecvSSRC = ssrc;
mRecvSSRCSetInProgress = true;
queue = true;
// Ensure lamba captures refs
RefPtr<WebrtcVideoConduit> self = this;
@@ -2102,95 +2106,95 @@ WebrtcVideoConduit::ReceivedRTPPacket(co
// errors to the PC.
WebrtcGmpPCHandleSetter setter(self->mPCHandle);
self->SetRemoteSSRC(ssrc); // this will likely re-create the VideoReceiveStream
// We want to unblock the queued packets on the original thread
thread->Dispatch(media::NewRunnableFrom([self, ssrc]() mutable {
if (ssrc == self->mRecvSSRC) {
// SSRC is set; insert queued packets
for (auto& packet : self->mQueuedPackets) {
- CSFLogDebug(logTag, "Inserting queued packets: seq# %u, Len %d ",
+ CSFLogDebug(LOGTAG, "Inserting queued packets: seq# %u, Len %d ",
(uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
if (self->DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) {
- CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
// Keep delivering and then clear the queue
}
}
self->mQueuedPackets.Clear();
// we don't leave inprogress until there are no changes in-flight
self->mRecvSSRCSetInProgress = false;
}
// else this is an intermediate switch; another is in-flight
return NS_OK;
}), NS_DISPATCH_NORMAL);
return NS_OK;
}));
return kMediaConduitNoError;
}
- CSFLogVerbose(logTag, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
+ CSFLogVerbose(LOGTAG, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
(uint16_t)ntohs(((uint16_t*) data)[1]), len,
(uint32_t) ntohl(((uint32_t*) data)[2]),
(uint32_t) ntohl(((uint32_t*) data)[2]));
if (DeliverPacket(data, len) != kMediaConduitNoError) {
- CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
return kMediaConduitRTPProcessingFailed;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcVideoConduit::ReceivedRTCPPacket(const void* data, int len)
{
- CSFLogVerbose(logTag, " %s Len %d ", __FUNCTION__, len);
+ CSFLogVerbose(LOGTAG, " %s Len %d ", __FUNCTION__, len);
if (DeliverPacket(data, len) != kMediaConduitNoError) {
- CSFLogError(logTag, "%s RTCP Processing Failed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s RTCP Processing Failed", __FUNCTION__);
return kMediaConduitRTPProcessingFailed;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcVideoConduit::StopTransmitting()
{
if (mEngineTransmitting) {
{
MutexAutoLock lock(mCodecMutex);
if (mSendStream) {
- CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
mSendStream->Stop();
}
}
mEngineTransmitting = false;
}
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcVideoConduit::StartTransmitting()
{
if (mEngineTransmitting) {
return kMediaConduitNoError;
}
- CSFLogDebug(logTag, "%s Attemping to start... ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Attemping to start... ", __FUNCTION__);
{
// Start Transmitting on the video engine
MutexAutoLock lock(mCodecMutex);
if (!mSendStream) {
MediaConduitErrorCode rval = CreateSendStream();
if (rval != kMediaConduitNoError) {
- CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__, rval);
+ CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval);
return rval;
}
}
mSendStream->Start();
// XXX File a bug to consider hooking this up to the state of mtransport
mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
mEngineTransmitting = true;
@@ -2201,32 +2205,32 @@ WebrtcVideoConduit::StartTransmitting()
MediaConduitErrorCode
WebrtcVideoConduit::StopReceiving()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
// Are we receiving already? If so, stop receiving and playout
// since we can't apply new recv codec when the engine is playing.
if (mEngineReceiving && mRecvStream) {
- CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
mRecvStream->Stop();
}
mEngineReceiving = false;
return kMediaConduitNoError;
}
MediaConduitErrorCode
WebrtcVideoConduit::StartReceiving()
{
if (mEngineReceiving) {
return kMediaConduitNoError;
}
- CSFLogDebug(logTag, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__, mRecvSSRC, mRecvSSRC);
+ CSFLogDebug(LOGTAG, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__, mRecvSSRC, mRecvSSRC);
{
// Start Receive on the video engine
MutexAutoLock lock(mCodecMutex);
MOZ_ASSERT(mRecvStream);
mRecvStream->Start();
// XXX File a bug to consider hooking this up to the state of mtransport
mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
@@ -2240,67 +2244,67 @@ WebrtcVideoConduit::StartReceiving()
// Called on MSG thread
bool
WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
const webrtc::PacketOptions& options)
{
// XXX(pkerr) - PacketOptions possibly containing RTP extensions are ignored.
// The only field in it is the packet_id, which is used when the header
// extension for TransportSequenceNumber is being used, which we don't.
- CSFLogVerbose(logTag, "%s Sent RTP Packet seq %d, len %lu, SSRC %u (0x%x)",
+ CSFLogVerbose(LOGTAG, "%s Sent RTP Packet seq %d, len %lu, SSRC %u (0x%x)",
__FUNCTION__,
(uint16_t) ntohs(*((uint16_t*) &packet[2])),
(unsigned long)length,
(uint32_t) ntohl(*((uint32_t*) &packet[8])),
(uint32_t) ntohl(*((uint32_t*) &packet[8])));
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if (!mTransmitterTransport ||
NS_FAILED(mTransmitterTransport->SendRtpPacket(packet, length)))
{
- CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s RTP Packet Send Failed ", __FUNCTION__);
return false;
}
return true;
}
// Called from multiple threads including webrtc Process thread
bool
WebrtcVideoConduit::SendRtcp(const uint8_t* packet, size_t length)
{
- CSFLogVerbose(logTag, "%s : len %lu ", __FUNCTION__, (unsigned long)length);
+ CSFLogVerbose(LOGTAG, "%s : len %lu ", __FUNCTION__, (unsigned long)length);
// We come here if we have only one pipeline/conduit setup,
// such as for unidirectional streams.
// We also end up here if we are receiving
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if (mReceiverTransport &&
NS_SUCCEEDED(mReceiverTransport->SendRtcpPacket(packet, length)))
{
// Might be a sender report, might be a receiver report, we don't know.
- CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s Sent RTCP Packet ", __FUNCTION__);
return true;
}
if (mTransmitterTransport &&
NS_SUCCEEDED(mTransmitterTransport->SendRtcpPacket(packet, length))) {
return true;
}
- CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s RTCP Packet Send Failed ", __FUNCTION__);
return false;
}
void
WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame)
{
- CSFLogVerbose(logTag, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
+ CSFLogVerbose(LOGTAG, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
mRecvSSRC, mRecvSSRC, video_frame.width(), video_frame.height());
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if (!mRenderer) {
- CSFLogError(logTag, "%s Renderer is NULL ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Renderer is NULL ", __FUNCTION__);
return;
}
if (mReceivingWidth != video_frame.width() ||
mReceivingHeight != video_frame.height()) {
mReceivingWidth = video_frame.width();
mReceivingHeight = video_frame.height();
mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight, mNumReceivingStreams);
@@ -2350,37 +2354,37 @@ WebrtcVideoConduit::CodecsDifferent(cons
/**
* Perform validation on the codecConfig to be applied
* Verifies if the codec is already applied.
*/
MediaConduitErrorCode
WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo)
{
if(!codecInfo) {
- CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
if((codecInfo->mName.empty()) ||
(codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) {
- CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
return kMediaConduitNoError;
}
void
WebrtcVideoConduit::DumpCodecDB() const
{
for (auto& entry : mRecvCodecList) {
- CSFLogDebug(logTag, "Payload Name: %s", entry->mName.c_str());
- CSFLogDebug(logTag, "Payload Type: %d", entry->mType);
- CSFLogDebug(logTag, "Payload Max Frame Size: %d", entry->mEncodingConstraints.maxFs);
- CSFLogDebug(logTag, "Payload Max Frame Rate: %d", entry->mEncodingConstraints.maxFps);
+ CSFLogDebug(LOGTAG, "Payload Name: %s", entry->mName.c_str());
+ CSFLogDebug(LOGTAG, "Payload Type: %d", entry->mType);
+ CSFLogDebug(LOGTAG, "Payload Max Frame Size: %d", entry->mEncodingConstraints.maxFs);
+ CSFLogDebug(LOGTAG, "Payload Max Frame Rate: %d", entry->mEncodingConstraints.maxFps);
}
}
void
WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample)
{
mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
}
--- a/media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.cpp
@@ -35,67 +35,71 @@ using namespace mozilla;
using namespace mozilla::java;
using namespace mozilla::java::sdk;
static const int32_t DECODER_TIMEOUT = 10 * PR_USEC_PER_MSEC; // 10ms
static const char MEDIACODEC_VIDEO_MIME_VP8[] = "video/x-vnd.on2.vp8";
namespace mozilla {
-static const char* logTag ="WebrtcMediaCodecVP8VideoCodec";
+static const char* wmcLogTag ="WebrtcMediaCodecVP8VideoCodec";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG wmcLogTag
class CallbacksSupport final : public JavaCallbacksSupport
{
public:
CallbacksSupport(webrtc::EncodedImageCallback* aCallback)
: mCallback(aCallback)
, mCritSect(webrtc::CriticalSectionWrapper::CreateCriticalSection())
, mPictureId(0) {
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
memset(&mEncodedImage, 0, sizeof(mEncodedImage));
}
~CallbacksSupport() {
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
if (mEncodedImage._size) {
delete [] mEncodedImage._buffer;
mEncodedImage._buffer = nullptr;
mEncodedImage._size = 0;
}
}
void VerifyAndAllocate(const uint32_t minimumSize)
{
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
if(minimumSize > mEncodedImage._size)
{
uint8_t* newBuffer = new uint8_t[minimumSize];
MOZ_RELEASE_ASSERT(newBuffer);
if(mEncodedImage._buffer) {
delete [] mEncodedImage._buffer;
}
mEncodedImage._buffer = newBuffer;
mEncodedImage._size = minimumSize;
}
}
void HandleInput(jlong aTimestamp, bool aProcessed) override
{
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
}
void HandleOutputFormatChanged(MediaFormat::Param aFormat) override
{
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
}
void HandleOutput(Sample::Param aSample)
{
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
BufferInfo::LocalRef info = aSample->Info();
int32_t size;
bool ok = NS_SUCCEEDED(info->Size(&size));
MOZ_RELEASE_ASSERT(ok);
if (size > 0) {
webrtc::CriticalSectionScoped lock(mCritSect.get());
@@ -140,17 +144,17 @@ public:
MOZ_RELEASE_ASSERT(mCallback);
mCallback->OnEncodedImage(mEncodedImage, &info, &header);
}
}
void HandleError(const MediaResult& aError) override
{
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
}
friend class WebrtcMediaCodecVP8VideoRemoteEncoder;
private:
webrtc::EncodedImageCallback* mCallback;
Atomic<bool> mCanceled;
webrtc::EncodedImage mEncodedImage;
@@ -289,96 +293,96 @@ private:
class WebrtcAndroidMediaCodec {
public:
WebrtcAndroidMediaCodec()
: mEncoderCallback(nullptr)
, mDecoderCallback(nullptr)
, isStarted(false)
, mEnding(false) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}
nsresult Configure(uint32_t width,
uint32_t height,
const jobject aSurface,
uint32_t flags,
const char* mime,
bool encoder) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
nsresult res = NS_OK;
if (!mCoder) {
mWidth = width;
mHeight = height;
MediaFormat::LocalRef format;
res = MediaFormat::CreateVideoFormat(nsCString(mime),
mWidth,
mHeight,
&format);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
return NS_ERROR_FAILURE;
}
if (encoder) {
mCoder = CreateEncoder(mime);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateEncoderByType failed err = %d", __FUNCTION__, (int)res);
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, CreateEncoderByType failed err = %d", __FUNCTION__, (int)res);
return NS_ERROR_FAILURE;
}
res = format->SetInteger(MediaFormat::KEY_BIT_RATE, 1000*300);
res = format->SetInteger(MediaFormat::KEY_BITRATE_MODE, 2);
res = format->SetInteger(MediaFormat::KEY_COLOR_FORMAT, 21);
res = format->SetInteger(MediaFormat::KEY_FRAME_RATE, 30);
res = format->SetInteger(MediaFormat::KEY_I_FRAME_INTERVAL, 100);
} else {
mCoder = CreateDecoder(mime);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateDecoderByType failed err = %d", __FUNCTION__, (int)res);
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, CreateDecoderByType failed err = %d", __FUNCTION__, (int)res);
return NS_ERROR_FAILURE;
}
}
res = mCoder->Configure(format, nullptr, nullptr, flags);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, err = %d", __FUNCTION__, (int)res);
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, err = %d", __FUNCTION__, (int)res);
}
}
return res;
}
nsresult Start() {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (!mCoder) {
return NS_ERROR_FAILURE;
}
mEnding = false;
nsresult res;
res = mCoder->Start();
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->start() return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->start() return err = %d",
__FUNCTION__, (int)res);
return res;
}
isStarted = true;
return NS_OK;
}
nsresult Stop() {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
mEnding = true;
if (mOutputDrain != nullptr) {
mOutputDrain->Stop();
mOutputDrain = nullptr;
}
mCoder->Stop();
@@ -386,17 +390,17 @@ public:
isStarted = false;
return NS_OK;
}
void GenerateVideoFrame(
size_t width, size_t height, uint32_t timeStamp,
void* decoded, int color_format) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
// TODO: eliminate extra pixel copy/color conversion
size_t widthUV = (width + 1) / 2;
rtc::scoped_refptr<webrtc::I420Buffer> buffer;
buffer = webrtc::I420Buffer::Create(width, height, width, widthUV, widthUV);
uint8_t* src_nv12 = static_cast<uint8_t *>(decoded);
int src_nv12_y_size = width * height;
@@ -418,40 +422,40 @@ public:
int32_t
FeedMediaCodecInput(
const webrtc::EncodedImage& inputImage,
int64_t renderTimeMs) {
#ifdef WEBRTC_MEDIACODEC_DEBUG
uint32_t time = PR_IntervalNow();
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
#endif
int inputIndex = DequeueInputBuffer(DECODER_TIMEOUT);
if (inputIndex == -1) {
- CSFLogError(logTag, "%s equeue input buffer failed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s dequeue input buffer failed", __FUNCTION__);
return inputIndex;
}
#ifdef WEBRTC_MEDIACODEC_DEBUG
- CSFLogDebug(logTag, "%s dequeue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+ CSFLogDebug(LOGTAG, "%s dequeue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
time = PR_IntervalNow();
#endif
size_t size = inputImage._length;
JNIEnv* env = jsjni_GetJNIForThread();
jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
void* directBuffer = env->GetDirectBufferAddress(buffer);
PodCopy((uint8_t*)directBuffer, inputImage._buffer, size);
if (inputIndex >= 0) {
- CSFLogError(logTag, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
+ CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
QueueInputBuffer(inputIndex, 0, size, renderTimeMs, 0);
{
if (mOutputDrain == nullptr) {
mOutputDrain = new OutputDrain(this);
mOutputDrain->Start();
}
EncodedFrame frame;
@@ -476,38 +480,38 @@ public:
#ifdef WEBRTC_MEDIACODEC_DEBUG
uint32_t time = PR_IntervalNow();
#endif
nsresult res;
BufferInfo::LocalRef bufferInfo;
res = BufferInfo::New(&bufferInfo);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, BufferInfo::New return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, BufferInfo::New return err = %d",
__FUNCTION__, (int)res);
return res;
}
int32_t outputIndex = DequeueOutputBuffer(bufferInfo);
if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
// Not an error: output not available yet. Try later.
- CSFLogDebug(logTag, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
// handle format change
- CSFLogDebug(logTag, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
- CSFLogDebug(logTag, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
GetOutputBuffers();
} else if (outputIndex < 0) {
- CSFLogDebug(logTag, "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer unknown error:%d", __FUNCTION__, outputIndex);
MonitorAutoLock lock(aMonitor);
aInputFrames.pop();
} else {
#ifdef WEBRTC_MEDIACODEC_DEBUG
- CSFLogDebug(logTag, "%s dequeue output buffer# return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer# return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
EncodedFrame frame;
{
MonitorAutoLock lock(aMonitor);
frame = aInputFrames.front();
aInputFrames.pop();
}
@@ -519,58 +523,58 @@ public:
JNIEnv* env = jsjni_GetJNIForThread();
jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
if (buffer) {
// The buffer will be null on Android L if we are decoding to a Surface
void* directBuffer = env->GetDirectBufferAddress(buffer);
int color_format = 0;
- CSFLogDebug(logTag, "%s generate video frame, width = %d, height = %d, timeStamp_ = %d", __FUNCTION__, frame.width_, frame.height_, frame.timeStamp_);
+ CSFLogDebug(LOGTAG, "%s generate video frame, width = %d, height = %d, timeStamp_ = %d", __FUNCTION__, frame.width_, frame.height_, frame.timeStamp_);
GenerateVideoFrame(frame.width_, frame.height_, frame.timeStamp_, directBuffer, color_format);
mDecoderCallback->Decoded(*mVideoFrame);
ReleaseOutputBuffer(outputIndex, false);
env->DeleteLocalRef(buffer);
}
}
return NS_OK;
}
int32_t DequeueInputBuffer(int64_t time) {
nsresult res;
int32_t inputIndex;
res = mCoder->DequeueInputBuffer(time, &inputIndex);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueInputBuffer() return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueInputBuffer() return err = %d",
__FUNCTION__, (int)res);
return -1;
}
return inputIndex;
}
void QueueInputBuffer(int32_t inputIndex, int32_t offset, size_t size, int64_t renderTimes, int32_t flags) {
nsresult res = NS_OK;
res = mCoder->QueueInputBuffer(inputIndex, offset, size, renderTimes, flags);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->QueueInputBuffer() return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->QueueInputBuffer() return err = %d",
__FUNCTION__, (int)res);
}
}
int32_t DequeueOutputBuffer(BufferInfo::Param aInfo) {
nsresult res;
int32_t outputStatus;
res = mCoder->DequeueOutputBuffer(aInfo, DECODER_TIMEOUT, &outputStatus);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueOutputBuffer() return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueOutputBuffer() return err = %d",
__FUNCTION__, (int)res);
return -1;
}
return outputStatus;
}
void ReleaseOutputBuffer(int32_t index, bool flag) {
@@ -584,17 +588,17 @@ public:
env->DeleteGlobalRef(mInputBuffers);
}
nsresult res;
jni::ObjectArray::LocalRef inputBuffers;
res = mCoder->GetInputBuffers(&inputBuffers);
mInputBuffers = (jobjectArray) env->NewGlobalRef(inputBuffers.Get());
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, GetInputBuffers return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, GetInputBuffers return err = %d",
__FUNCTION__, (int)res);
return nullptr;
}
return mInputBuffers;
}
jobjectArray GetOutputBuffers() {
@@ -604,17 +608,17 @@ public:
env->DeleteGlobalRef(mOutputBuffers);
}
nsresult res;
jni::ObjectArray::LocalRef outputBuffers;
res = mCoder->GetOutputBuffers(&outputBuffers);
mOutputBuffers = (jobjectArray) env->NewGlobalRef(outputBuffers.Get());
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, GetOutputBuffers return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, GetOutputBuffers return err = %d",
__FUNCTION__, (int)res);
return nullptr;
}
return mOutputBuffers;
}
void SetDecoderCallback(webrtc::DecodedImageCallback* aCallback) {
@@ -693,17 +697,17 @@ static bool I420toNV12(uint8_t* dstY, ui
inputImage.height());
return converted;
}
// Encoder.
WebrtcMediaCodecVP8VideoEncoder::WebrtcMediaCodecVP8VideoEncoder()
: mCallback(nullptr)
, mMediaCodecEncoder(nullptr) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
memset(&mEncodedImage, 0, sizeof(mEncodedImage));
}
bool WebrtcMediaCodecVP8VideoEncoder::ResetInputBuffers() {
mInputBuffers = mMediaCodecEncoder->GetInputBuffers();
if (!mInputBuffers)
@@ -742,26 +746,26 @@ WebrtcMediaCodecVP8VideoEncoder::VerifyA
return 0;
}
int32_t WebrtcMediaCodecVP8VideoEncoder::InitEncode(
const webrtc::VideoCodec* codecSettings,
int32_t numberOfCores,
size_t maxPayloadSize) {
mMaxPayloadSize = maxPayloadSize;
- CSFLogDebug(logTag, "%s, w = %d, h = %d", __FUNCTION__, codecSettings->width, codecSettings->height);
+ CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, codecSettings->width, codecSettings->height);
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
const webrtc::VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const std::vector<webrtc::FrameType>* frame_types) {
- CSFLogDebug(logTag, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
+ CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
if (!mMediaCodecEncoder) {
mMediaCodecEncoder = new WebrtcAndroidMediaCodec();
}
if (!mMediaCodecEncoder->isStarted) {
if (inputImage.width() == 0 || inputImage.height() == 0) {
return WEBRTC_VIDEO_CODEC_ERROR;
@@ -769,37 +773,37 @@ int32_t WebrtcMediaCodecVP8VideoEncoder:
mFrameWidth = inputImage.width();
mFrameHeight = inputImage.height();
}
mMediaCodecEncoder->SetEncoderCallback(mCallback);
nsresult res = mMediaCodecEncoder->Configure(mFrameWidth, mFrameHeight, nullptr, MediaCodec::CONFIGURE_FLAG_ENCODE, MEDIACODEC_VIDEO_MIME_VP8, true /* encoder */);
if (res != NS_OK) {
- CSFLogDebug(logTag, "%s, encoder configure return err = %d",
+ CSFLogDebug(LOGTAG, "%s, encoder configure return err = %d",
__FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
res = mMediaCodecEncoder->Start();
if (NS_FAILED(res)) {
mMediaCodecEncoder->isStarted = false;
- CSFLogDebug(logTag, "%s start encoder. err = %d", __FUNCTION__, (int)res);
+ CSFLogDebug(LOGTAG, "%s start encoder. err = %d", __FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
bool retBool = ResetInputBuffers();
if (!retBool) {
- CSFLogDebug(logTag, "%s ResetInputBuffers failed.", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
retBool = ResetOutputBuffers();
if (!retBool) {
- CSFLogDebug(logTag, "%s ResetOutputBuffers failed.", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
mMediaCodecEncoder->isStarted = true;
}
#ifdef WEBRTC_MEDIACODEC_DEBUG
uint32_t time = PR_IntervalNow();
@@ -807,79 +811,79 @@ int32_t WebrtcMediaCodecVP8VideoEncoder:
rtc::scoped_refptr<webrtc::VideoFrameBuffer> inputBuffer = inputImage.video_frame_buffer();
size_t sizeY = inputImage.height() * inputBuffer->StrideY();
size_t sizeUV = ((inputImage.height() + 1)/2) * inputBuffer->StrideU();
size_t size = sizeY + 2 * sizeUV;
int inputIndex = mMediaCodecEncoder->DequeueInputBuffer(DECODER_TIMEOUT);
if (inputIndex == -1) {
- CSFLogError(logTag, "%s dequeue input buffer failed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s dequeue input buffer failed", __FUNCTION__);
return inputIndex;
}
#ifdef WEBRTC_MEDIACODEC_DEBUG
- CSFLogDebug(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() dequeue OMX input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+ CSFLogDebug(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() dequeue OMX input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
if (inputIndex >= 0) {
JNIEnv* env = jsjni_GetJNIForThread();
jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
void* directBuffer = env->GetDirectBufferAddress(buffer);
uint8_t* dstY = static_cast<uint8_t*>(directBuffer);
uint16_t* dstUV = reinterpret_cast<uint16_t*>(dstY + sizeY);
bool converted = I420toNV12(dstY, dstUV, inputImage);
if (!converted) {
- CSFLogError(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
env->DeleteLocalRef(buffer);
#ifdef WEBRTC_MEDIACODEC_DEBUG
time = PR_IntervalNow();
- CSFLogError(logTag, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
+ CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
#endif
mMediaCodecEncoder->QueueInputBuffer(inputIndex, 0, size, inputImage.render_time_ms() * PR_USEC_PER_MSEC /* ms to us */, 0);
#ifdef WEBRTC_MEDIACODEC_DEBUG
- CSFLogDebug(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() queue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+ CSFLogDebug(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() queue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
mEncodedImage._encodedWidth = inputImage.width();
mEncodedImage._encodedHeight = inputImage.height();
mEncodedImage._timeStamp = inputImage.timestamp();
mEncodedImage.capture_time_ms_ = inputImage.timestamp();
nsresult res;
BufferInfo::LocalRef bufferInfo;
res = BufferInfo::New(&bufferInfo);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "WebrtcMediaCodecVP8VideoEncoder::%s, BufferInfo::New return err = %d",
+ CSFLogDebug(LOGTAG, "WebrtcMediaCodecVP8VideoEncoder::%s, BufferInfo::New return err = %d",
__FUNCTION__, (int)res);
return -1;
}
int32_t outputIndex = mMediaCodecEncoder->DequeueOutputBuffer(bufferInfo);
if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
// Not an error: output not available yet. Try later.
- CSFLogDebug(logTag, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
// handle format change
- CSFLogDebug(logTag, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
- CSFLogDebug(logTag, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
mMediaCodecEncoder->GetOutputBuffers();
} else if (outputIndex < 0) {
- CSFLogDebug(logTag, "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer unknown error:%d", __FUNCTION__, outputIndex);
} else {
#ifdef WEBRTC_MEDIACODEC_DEBUG
- CSFLogDebug(logTag, "%s dequeue output buffer return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
JNIEnv* env = jsjni_GetJNIForThread();
jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
if (buffer) {
int32_t offset;
bufferInfo->Offset(&offset);
int32_t flags;
@@ -893,21 +897,21 @@ int32_t WebrtcMediaCodecVP8VideoEncoder:
} else {
mEncodedImage._frameType = webrtc::kVideoFrameDelta;
}
mEncodedImage._completeFrame = true;
int32_t size;
bufferInfo->Size(&size);
#ifdef WEBRTC_MEDIACODEC_DEBUG
- CSFLogDebug(logTag, "%s dequeue output buffer ok, index:%d, buffer size = %d, buffer offset = %d, flags = %d", __FUNCTION__, outputIndex, size, offset, flags);
+ CSFLogDebug(LOGTAG, "%s dequeue output buffer ok, index:%d, buffer size = %d, buffer offset = %d, flags = %d", __FUNCTION__, outputIndex, size, offset, flags);
#endif
if(VerifyAndAllocate(size) == -1) {
- CSFLogDebug(logTag, "%s VerifyAndAllocate buffers failed", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s VerifyAndAllocate buffers failed", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
mEncodedImage._length = size;
// xxx It's too bad the mediacodec API forces us to memcpy this....
// we should find a way that able to 'hold' the buffer or transfer it from inputImage (ping-pong
// buffers or select them from a small pool)
@@ -933,85 +937,85 @@ int32_t WebrtcMediaCodecVP8VideoEncoder:
}
}
}
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoEncoder::RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
mCallback = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoEncoder::Release() {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
delete mMediaCodecEncoder;
mMediaCodecEncoder = nullptr;
delete [] mEncodedImage._buffer;
mEncodedImage._buffer = nullptr;
mEncodedImage._size = 0;
return WEBRTC_VIDEO_CODEC_OK;
}
WebrtcMediaCodecVP8VideoEncoder::~WebrtcMediaCodecVP8VideoEncoder() {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
Release();
}
int32_t WebrtcMediaCodecVP8VideoEncoder::SetChannelParameters(uint32_t packetLoss, int64_t rtt) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoEncoder::SetRates(uint32_t newBitRate, uint32_t frameRate) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (!mMediaCodecEncoder) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
// XXX
// 1. implement MediaCodec's setParameters method
// 2.find a way to initiate a Java Bundle instance as parameter for MediaCodec setParameters method.
// mMediaCodecEncoder->setParameters
return WEBRTC_VIDEO_CODEC_OK;
}
WebrtcMediaCodecVP8VideoRemoteEncoder::~WebrtcMediaCodecVP8VideoRemoteEncoder() {
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
Release();
}
int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::InitEncode(
const webrtc::VideoCodec* codecSettings,
int32_t numberOfCores,
size_t maxPayloadSize) {
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::SetRates(uint32_t newBitRate, uint32_t frameRate) {
- CSFLogDebug(logTag, "%s, newBitRate: %d, frameRate: %d", __FUNCTION__, newBitRate, frameRate);
+ CSFLogDebug(LOGTAG, "%s, newBitRate: %d, frameRate: %d", __FUNCTION__, newBitRate, frameRate);
if (!mJavaEncoder) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
mJavaEncoder->SetRates(newBitRate);
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Encode(
const webrtc::VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const std::vector<webrtc::FrameType>* frame_types) {
- CSFLogDebug(logTag, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
+ CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
if (inputImage.width() == 0 || inputImage.height() == 0) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (!mJavaEncoder) {
JavaCallbacksSupport::Init();
mJavaCallbacks = CodecProxy::NativeCallbacks::New();
@@ -1021,17 +1025,17 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEn
MediaFormat::LocalRef format;
nsresult res = MediaFormat::CreateVideoFormat(nsCString(MEDIACODEC_VIDEO_MIME_VP8),
inputImage.width(),
inputImage.height(),
&format);
if (NS_FAILED(res)) {
- CSFLogDebug(logTag, "%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
+ CSFLogDebug(LOGTAG, "%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
res = format->SetInteger(nsCString("bitrate"), 300 * 1000);
res = format->SetInteger(nsCString("bitrate-mode"), 2);
res = format->SetInteger(nsCString("color-format"), 21);
res = format->SetInteger(nsCString("frame-rate"), 30);
res = format->SetInteger(nsCString("i-frame-interval"), 100);
@@ -1057,17 +1061,17 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEn
mConvertBufsize = size;
}
uint8_t* dstY = mConvertBuf;
uint16_t* dstUV = reinterpret_cast<uint16_t*>(dstY + sizeY);
bool converted = I420toNV12(dstY, dstUV, inputImage);
if (!converted) {
- CSFLogError(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(mConvertBuf, size);
BufferInfo::LocalRef bufferInfo;
nsresult rv = BufferInfo::New(&bufferInfo);
if (NS_FAILED(rv)) {
@@ -1086,17 +1090,17 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEn
}
int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) {
mCallback = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Release() {
- CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
+ CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
if (mJavaEncoder) {
mJavaEncoder->Release();
mJavaEncoder = nullptr;
}
if (mJavaCallbacks) {
JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
@@ -1113,17 +1117,17 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEn
}
// Decoder.
WebrtcMediaCodecVP8VideoDecoder::WebrtcMediaCodecVP8VideoDecoder()
: mCallback(nullptr)
, mFrameWidth(0)
, mFrameHeight(0)
, mMediaCodecDecoder(nullptr) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}
bool WebrtcMediaCodecVP8VideoDecoder::ResetInputBuffers() {
mInputBuffers = mMediaCodecDecoder->GetInputBuffers();
if (!mInputBuffers)
return false;
@@ -1153,102 +1157,102 @@ int32_t WebrtcMediaCodecVP8VideoDecoder:
int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
const webrtc::EncodedImage& inputImage,
bool missingFrames,
const webrtc::RTPFragmentationHeader* fragmentation,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
int64_t renderTimeMs) {
- CSFLogDebug(logTag, "%s, renderTimeMs = %" PRId64, __FUNCTION__, renderTimeMs);
+ CSFLogDebug(LOGTAG, "%s, renderTimeMs = %" PRId64, __FUNCTION__, renderTimeMs);
if (inputImage._length== 0 || !inputImage._buffer) {
- CSFLogDebug(logTag, "%s, input Image invalid. length = %" PRIdPTR, __FUNCTION__, inputImage._length);
+ CSFLogDebug(LOGTAG, "%s, input Image invalid. length = %" PRIdPTR, __FUNCTION__, inputImage._length);
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (inputImage._frameType == webrtc::kVideoFrameKey) {
- CSFLogDebug(logTag, "%s, inputImage is Golden frame",
+ CSFLogDebug(LOGTAG, "%s, inputImage is Golden frame",
__FUNCTION__);
mFrameWidth = inputImage._encodedWidth;
mFrameHeight = inputImage._encodedHeight;
}
if (!mMediaCodecDecoder->isStarted) {
if (mFrameWidth == 0 || mFrameHeight == 0) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
mMediaCodecDecoder->SetDecoderCallback(mCallback);
nsresult res = mMediaCodecDecoder->Configure(mFrameWidth, mFrameHeight, nullptr, 0, MEDIACODEC_VIDEO_MIME_VP8, false /* decoder */);
if (res != NS_OK) {
- CSFLogDebug(logTag, "%s, decoder configure return err = %d",
+ CSFLogDebug(LOGTAG, "%s, decoder configure return err = %d",
__FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
res = mMediaCodecDecoder->Start();
if (NS_FAILED(res)) {
mMediaCodecDecoder->isStarted = false;
- CSFLogDebug(logTag, "%s start decoder. err = %d", __FUNCTION__, (int)res);
+ CSFLogDebug(LOGTAG, "%s start decoder. err = %d", __FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
bool retBool = ResetInputBuffers();
if (!retBool) {
- CSFLogDebug(logTag, "%s ResetInputBuffers failed.", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
retBool = ResetOutputBuffers();
if (!retBool) {
- CSFLogDebug(logTag, "%s ResetOutputBuffers failed.", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
mMediaCodecDecoder->isStarted = true;
}
#ifdef WEBRTC_MEDIACODEC_DEBUG
uint32_t time = PR_IntervalNow();
- CSFLogDebug(logTag, "%s start decoder took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+ CSFLogDebug(LOGTAG, "%s start decoder took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
bool feedFrame = true;
int32_t ret = WEBRTC_VIDEO_CODEC_ERROR;
while (feedFrame) {
ret = mMediaCodecDecoder->FeedMediaCodecInput(inputImage, renderTimeMs);
feedFrame = (ret == -1);
}
- CSFLogDebug(logTag, "%s end, ret = %d", __FUNCTION__, ret);
+ CSFLogDebug(LOGTAG, "%s end, ret = %d", __FUNCTION__, ret);
return ret;
}
void WebrtcMediaCodecVP8VideoDecoder::DecodeFrame(EncodedFrame* frame) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}
int32_t WebrtcMediaCodecVP8VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback) {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
mCallback = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoDecoder::Release() {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
delete mMediaCodecDecoder;
mMediaCodecDecoder = nullptr;
return WEBRTC_VIDEO_CODEC_OK;
}
WebrtcMediaCodecVP8VideoDecoder::~WebrtcMediaCodecVP8VideoDecoder() {
- CSFLogDebug(logTag, "%s ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
Release();
}
}
--- a/media/webrtc/signaling/src/media-conduit/moz.build
+++ b/media/webrtc/signaling/src/media-conduit/moz.build
@@ -13,28 +13,24 @@ LOCAL_INCLUDES += [
'/media/webrtc',
'/media/webrtc/signaling/src/common',
'/media/webrtc/signaling/src/common/browser_logging',
'/media/webrtc/signaling/src/common/time_profiling',
'/media/webrtc/signaling/src/peerconnection',
'/media/webrtc/trunk',
]
-SOURCES += [
+UNIFIED_SOURCES += [
'AudioConduit.cpp',
- 'VideoConduit.cpp',
-]
-
-UNIFIED_SOURCES += [
'GmpVideoCodec.cpp',
'MediaDataDecoderCodec.cpp',
+ 'VideoConduit.cpp',
'WebrtcGmpVideoCodec.cpp',
'WebrtcMediaDataDecoderCodec.cpp',
]
if CONFIG['OS_TARGET'] == 'Android':
- # Duplicate definition of logTag
- SOURCES += [
+ UNIFIED_SOURCES += [
'MediaCodecVideoCodec.cpp',
'WebrtcMediaCodecVP8VideoCodec.cpp',
]
FINAL_LIBRARY = 'xul'
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionCtx.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionCtx.cpp
@@ -24,17 +24,21 @@
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsCRTGlue.h"
#include "gmp-video-decode.h" // GMP_API_VIDEO_DECODER
#include "gmp-video-encode.h" // GMP_API_VIDEO_ENCODER
-static const char* logTag = "PeerConnectionCtx";
+static const char* pccLogTag = "PeerConnectionCtx";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG pccLogTag
namespace mozilla {
using namespace dom;
class PeerConnectionCtxObserver : public nsIObserver
{
public:
@@ -60,17 +64,17 @@ public:
false);
MOZ_ALWAYS_SUCCEEDS(rv);
(void) rv;
}
NS_IMETHOD Observe(nsISupports* aSubject, const char* aTopic,
const char16_t* aData) override {
if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
- CSFLogDebug(logTag, "Shutting down PeerConnectionCtx");
+ CSFLogDebug(LOGTAG, "Shutting down PeerConnectionCtx");
PeerConnectionCtx::Destroy();
nsCOMPtr<nsIObserverService> observerService =
services::GetObserverService();
if (!observerService)
return NS_ERROR_FAILURE;
nsresult rv = observerService->RemoveObserver(this,
@@ -81,23 +85,23 @@ public:
MOZ_ALWAYS_SUCCEEDS(rv);
// Make sure we're not deleted while still inside ::Observe()
RefPtr<PeerConnectionCtxObserver> kungFuDeathGrip(this);
PeerConnectionCtx::gPeerConnectionCtxObserver = nullptr;
}
if (strcmp(aTopic, NS_IOSERVICE_OFFLINE_STATUS_TOPIC) == 0) {
if (NS_strcmp(aData, u"" NS_IOSERVICE_OFFLINE) == 0) {
- CSFLogDebug(logTag, "Updating network state to offline");
+ CSFLogDebug(LOGTAG, "Updating network state to offline");
PeerConnectionCtx::UpdateNetworkState(false);
} else if(NS_strcmp(aData, u"" NS_IOSERVICE_ONLINE) == 0) {
- CSFLogDebug(logTag, "Updating network state to online");
+ CSFLogDebug(LOGTAG, "Updating network state to online");
PeerConnectionCtx::UpdateNetworkState(true);
} else {
- CSFLogDebug(logTag, "Received unsupported network state event");
+ CSFLogDebug(LOGTAG, "Received unsupported network state event");
MOZ_CRASH();
}
}
return NS_OK;
}
private:
virtual ~PeerConnectionCtxObserver()
@@ -134,17 +138,17 @@ nsresult PeerConnectionCtx::InitializeGl
MOZ_ASSERT(gMainThread == mainThread);
}
nsresult res;
MOZ_ASSERT(NS_IsMainThread());
if (!gInstance) {
- CSFLogDebug(logTag, "Creating PeerConnectionCtx");
+ CSFLogDebug(LOGTAG, "Creating PeerConnectionCtx");
PeerConnectionCtx *ctx = new PeerConnectionCtx();
res = ctx->Initialize();
PR_ASSERT(NS_SUCCEEDED(res));
if (!NS_SUCCEEDED(res))
return res;
gInstance = ctx;
@@ -164,17 +168,17 @@ PeerConnectionCtx* PeerConnectionCtx::Ge
return gInstance;
}
bool PeerConnectionCtx::isActive() {
return gInstance;
}
void PeerConnectionCtx::Destroy() {
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
if (gInstance) {
gInstance->Cleanup();
delete gInstance;
gInstance = nullptr;
}
StopWebRtcLog();
@@ -392,39 +396,39 @@ static void GMPReady() {
NS_DISPATCH_NORMAL);
};
void PeerConnectionCtx::initGMP()
{
mGMPService = do_GetService("@mozilla.org/gecko-media-plugin-service;1");
if (!mGMPService) {
- CSFLogError(logTag, "%s failed to get the gecko-media-plugin-service",
+ CSFLogError(LOGTAG, "%s failed to get the gecko-media-plugin-service",
__FUNCTION__);
return;
}
nsCOMPtr<nsIThread> thread;
nsresult rv = mGMPService->GetThread(getter_AddRefs(thread));
if (NS_FAILED(rv)) {
mGMPService = nullptr;
- CSFLogError(logTag,
+ CSFLogError(LOGTAG,
"%s failed to get the gecko-media-plugin thread, err=%u",
__FUNCTION__,
static_cast<unsigned>(rv));
return;
}
// presumes that all GMP dir scans have been queued for the GMPThread
thread->Dispatch(WrapRunnableNM(&GMPReady), NS_DISPATCH_NORMAL);
}
nsresult PeerConnectionCtx::Cleanup() {
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mQueuedJSEPOperations.Clear();
mGMPService = nullptr;
return NS_OK;
}
PeerConnectionCtx::~PeerConnectionCtx() {
// ensure mTelemetryTimer ends on main thread
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -129,17 +129,22 @@
#define ICE_PARSING "In RTCConfiguration passed to RTCPeerConnection constructor"
using namespace mozilla;
using namespace mozilla::dom;
typedef PCObserverString ObString;
-static const char* logTag = "PeerConnectionImpl";
+static const char* pciLogTag = "PeerConnectionImpl";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG pciLogTag
+
static mozilla::LazyLogModule logModuleInfo("signaling");
// Getting exceptions back down from PCObserver is generally not harmful.
namespace {
// This is a terrible hack. The problem is that SuppressException is not
// inline, and we link this file without libxul in some cases (e.g. for our test
// setup). So we can't use ErrorResult or IgnoredErrorResult because those call
// SuppressException... And we can't use FastErrorResult because we can't
@@ -197,22 +202,22 @@ static nsresult InitNSSInContent()
}
static bool nssStarted = false;
if (nssStarted) {
return NS_OK;
}
if (NSS_NoDB_Init(nullptr) != SECSuccess) {
- CSFLogError(logTag, "NSS_NoDB_Init failed.");
+ CSFLogError(LOGTAG, "NSS_NoDB_Init failed.");
return NS_ERROR_FAILURE;
}
if (NS_FAILED(mozilla::psm::InitializeCipherSuite())) {
- CSFLogError(logTag, "Fail to set up nss cipher suite.");
+ CSFLogError(LOGTAG, "Fail to set up nss cipher suite.");
return NS_ERROR_FAILURE;
}
mozilla::psm::DisableMD5();
nssStarted = true;
return NS_OK;
@@ -324,17 +329,17 @@ PeerConnectionImpl::PeerConnectionImpl(c
mWindow = do_QueryInterface(aGlobal->GetAsSupports());
if (IsPrivateBrowsing(mWindow)) {
mPrivateWindow = true;
log->EnterPrivateMode();
}
mWindow->AddPeerConnection();
mActiveOnWindow = true;
}
- CSFLogInfo(logTag, "%s: PeerConnectionImpl constructor for %s",
+ CSFLogInfo(LOGTAG, "%s: PeerConnectionImpl constructor for %s",
__FUNCTION__, mHandle.c_str());
STAMP_TIMECARD(mTimeCard, "Constructor Completed");
mAllowIceLoopback = Preferences::GetBool(
"media.peerconnection.ice.loopback", false);
mAllowIceLinkLocal = Preferences::GetBool(
"media.peerconnection.ice.link_local", false);
mForceIceTcp = Preferences::GetBool(
"media.peerconnection.ice.force_ice_tcp", false);
@@ -365,20 +370,20 @@ PeerConnectionImpl::~PeerConnectionImpl(
if (log) {
log->ExitPrivateMode();
}
mPrivateWindow = false;
}
if (PeerConnectionCtx::isActive()) {
PeerConnectionCtx::GetInstance()->mPeerConnections.erase(mHandle);
} else {
- CSFLogError(logTag, "PeerConnectionCtx is already gone. Ignoring...");
+ CSFLogError(LOGTAG, "PeerConnectionCtx is already gone. Ignoring...");
}
- CSFLogInfo(logTag, "%s: PeerConnectionImpl destructor invoked for %s",
+ CSFLogInfo(LOGTAG, "%s: PeerConnectionImpl destructor invoked for %s",
__FUNCTION__, mHandle.c_str());
Close();
// Since this and Initialize() occur on MainThread, they can't both be
// running at once
// Right now, we delete PeerConnectionCtx at XPCOM shutdown only, but we
@@ -391,17 +396,17 @@ already_AddRefed<DOMMediaStream>
PeerConnectionImpl::MakeMediaStream()
{
MediaStreamGraph* graph =
MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER, GetWindow());
RefPtr<DOMMediaStream> stream =
DOMMediaStream::CreateSourceStreamAsInput(GetWindow(), graph);
- CSFLogDebug(logTag, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream());
+ CSFLogDebug(LOGTAG, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream());
return stream.forget();
}
nsresult
PeerConnectionImpl::CreateRemoteSourceStreamInfo(RefPtr<RemoteSourceStreamInfo>*
aInfo,
const std::string& aStreamID)
@@ -678,45 +683,45 @@ PeerConnectionImpl::Initialize(PeerConne
mMedia->SignalCandidate.connect(this, &PeerConnectionImpl::CandidateReady);
// Initialize the media object.
res = mMedia->Init(aConfiguration.getStunServers(),
aConfiguration.getTurnServers(),
aConfiguration.getIceTransportPolicy());
if (NS_FAILED(res)) {
- CSFLogError(logTag, "%s: Couldn't initialize media object", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Couldn't initialize media object", __FUNCTION__);
return res;
}
PeerConnectionCtx::GetInstance()->mPeerConnections[mHandle] = this;
mJsepSession = MakeUnique<JsepSessionImpl>(mName,
MakeUnique<PCUuidGenerator>());
res = mJsepSession->Init();
if (NS_FAILED(res)) {
- CSFLogError(logTag, "%s: Couldn't init JSEP Session, res=%u",
+ CSFLogError(LOGTAG, "%s: Couldn't init JSEP Session, res=%u",
__FUNCTION__,
static_cast<unsigned>(res));
return res;
}
res = mJsepSession->SetIceCredentials(mMedia->ice_ctx()->ufrag(),
mMedia->ice_ctx()->pwd());
if (NS_FAILED(res)) {
- CSFLogError(logTag, "%s: Couldn't set ICE credentials, res=%u",
+ CSFLogError(LOGTAG, "%s: Couldn't set ICE credentials, res=%u",
__FUNCTION__,
static_cast<unsigned>(res));
return res;
}
res = mJsepSession->SetBundlePolicy(aConfiguration.getBundlePolicy());
if (NS_FAILED(res)) {
- CSFLogError(logTag, "%s: Couldn't set bundle policy, res=%u, error=%s",
+ CSFLogError(LOGTAG, "%s: Couldn't set bundle policy, res=%u, error=%s",
__FUNCTION__,
static_cast<unsigned>(res),
mJsepSession->GetLastError().c_str());
return res;
}
return NS_OK;
}
@@ -730,17 +735,17 @@ PeerConnectionImpl::Initialize(PeerConne
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aThread);
mThread = do_QueryInterface(aThread);
PeerConnectionConfiguration converted;
nsresult res = converted.Init(aConfiguration);
if (NS_FAILED(res)) {
- CSFLogError(logTag, "%s: Invalid RTCConfiguration", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Invalid RTCConfiguration", __FUNCTION__);
rv.Throw(res);
return;
}
res = Initialize(aObserver, &aWindow, converted, aThread);
if (NS_FAILED(res)) {
rv.Throw(res);
return;
@@ -758,25 +763,25 @@ PeerConnectionImpl::SetCertificate(mozil
PC_AUTO_ENTER_API_CALL_NO_CHECK();
MOZ_ASSERT(!mCertificate, "This can only be called once");
mCertificate = &aCertificate;
std::vector<uint8_t> fingerprint;
nsresult rv = CalculateFingerprint(DtlsIdentity::DEFAULT_HASH_ALGORITHM,
&fingerprint);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Couldn't calculate fingerprint, rv=%u",
+ CSFLogError(LOGTAG, "%s: Couldn't calculate fingerprint, rv=%u",
__FUNCTION__, static_cast<unsigned>(rv));
mCertificate = nullptr;
return;
}
rv = mJsepSession->AddDtlsFingerprint(DtlsIdentity::DEFAULT_HASH_ALGORITHM,
fingerprint);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Couldn't set DTLS credentials, rv=%u",
+ CSFLogError(LOGTAG, "%s: Couldn't set DTLS credentials, rv=%u",
__FUNCTION__, static_cast<unsigned>(rv));
mCertificate = nullptr;
}
}
const RefPtr<mozilla::dom::RTCCertificate>&
PeerConnectionImpl::Certificate() const
{
@@ -857,17 +862,17 @@ class ConfigureCodec {
// Ok, it is preffed on. Can we actually do it?
android::sp<android::OMXCodecReservation> encode = new android::OMXCodecReservation(true);
android::sp<android::OMXCodecReservation> decode = new android::OMXCodecReservation(false);
// Currently we just check if they're available right now, which will fail if we're
// trying to call ourself, for example. It will work for most real-world cases, like
// if we try to add a person to a 2-way call to make a 3-way mesh call
if (encode->ReserveOMXCodec() && decode->ReserveOMXCodec()) {
- CSFLogDebug( logTag, "%s: H264 hardware codec available", __FUNCTION__);
+ CSFLogDebug( LOGTAG, "%s: H264 hardware codec available", __FUNCTION__);
mHardwareH264Supported = true;
}
}
#endif // MOZ_WEBRTC_OMX
mSoftwareH264Enabled = PeerConnectionCtx::GetInstance()->gmpHasH264();
@@ -1036,25 +1041,25 @@ class ConfigureRedCodec {
nsresult
PeerConnectionImpl::ConfigureJsepSessionCodecs() {
nsresult res;
nsCOMPtr<nsIPrefService> prefs =
do_GetService("@mozilla.org/preferences-service;1", &res);
if (NS_FAILED(res)) {
- CSFLogError(logTag, "%s: Couldn't get prefs service, res=%u",
+ CSFLogError(LOGTAG, "%s: Couldn't get prefs service, res=%u",
__FUNCTION__,
static_cast<unsigned>(res));
return res;
}
nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
if (!branch) {
- CSFLogError(logTag, "%s: Couldn't get prefs branch", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Couldn't get prefs branch", __FUNCTION__);
return NS_ERROR_FAILURE;
}
ConfigureCodec configurer(branch);
mJsepSession->ForEachCodec(configurer);
// first find the red codec description
std::vector<JsepCodecDescription*>& codecs = mJsepSession->Codecs();
@@ -1100,30 +1105,30 @@ NS_IMETHODIMP
PeerConnectionImpl::EnsureDataConnection(uint16_t aLocalPort,
uint16_t aNumstreams,
uint32_t aMaxMessageSize,
bool aMMSSet)
{
PC_AUTO_ENTER_API_CALL(false);
if (mDataConnection) {
- CSFLogDebug(logTag,"%s DataConnection already connected",__FUNCTION__);
+ CSFLogDebug(LOGTAG,"%s DataConnection already connected",__FUNCTION__);
mDataConnection->SetMaxMessageSize(aMMSSet, aMaxMessageSize);
return NS_OK;
}
nsCOMPtr<nsIEventTarget> target = mWindow
? mWindow->EventTargetFor(TaskCategory::Other)
: nullptr;
mDataConnection = new DataChannelConnection(this, target);
if (!mDataConnection->Init(aLocalPort, aNumstreams, aMMSSet, aMaxMessageSize)) {
- CSFLogError(logTag,"%s DataConnection Init Failed",__FUNCTION__);
+ CSFLogError(LOGTAG,"%s DataConnection Init Failed",__FUNCTION__);
return NS_ERROR_FAILURE;
}
- CSFLogDebug(logTag,"%s DataChannelConnection %p attached to %s",
+ CSFLogDebug(LOGTAG,"%s DataChannelConnection %p attached to %s",
__FUNCTION__, (void*) mDataConnection.get(), mHandle.c_str());
return NS_OK;
}
nsresult
PeerConnectionImpl::GetDatachannelParameters(
uint32_t* channels,
uint16_t* localport,
@@ -1144,34 +1149,34 @@ PeerConnectionImpl::GetDatachannelParame
MOZ_ASSERT(sendDataChannel == recvDataChannel);
if (sendDataChannel) {
// This will release assert if there is no such index, and that's ok
const JsepTrackEncoding& encoding =
trackPair.mSending->GetNegotiatedDetails()->GetEncoding(0);
if (encoding.GetCodecs().empty()) {
- CSFLogError(logTag, "%s: Negotiated m=application with no codec. "
+ CSFLogError(LOGTAG, "%s: Negotiated m=application with no codec. "
"This is likely to be broken.",
__FUNCTION__);
return NS_ERROR_FAILURE;
}
for (const JsepCodecDescription* codec : encoding.GetCodecs()) {
if (codec->mType != SdpMediaSection::kApplication) {
- CSFLogError(logTag, "%s: Codec type for m=application was %u, this "
+ CSFLogError(LOGTAG, "%s: Codec type for m=application was %u, this "
"is a bug.",
__FUNCTION__,
static_cast<unsigned>(codec->mType));
MOZ_ASSERT(false, "Codec for m=application was not \"application\"");
return NS_ERROR_FAILURE;
}
if (codec->mName != "webrtc-datachannel") {
- CSFLogWarn(logTag, "%s: Codec for m=application was not "
+ CSFLogWarn(LOGTAG, "%s: Codec for m=application was not "
"webrtc-datachannel (was instead %s). ",
__FUNCTION__,
codec->mName.c_str());
continue;
}
if (codec->mChannels) {
*channels = codec->mChannels;
@@ -1226,65 +1231,65 @@ PeerConnectionImpl::DeferredAddTrackToJs
nsresult
PeerConnectionImpl::AddTrackToJsepSession(SdpMediaSection::MediaType type,
const std::string& streamId,
const std::string& trackId)
{
nsresult res = ConfigureJsepSessionCodecs();
if (NS_FAILED(res)) {
- CSFLogError(logTag, "Failed to configure codecs");
+ CSFLogError(LOGTAG, "Failed to configure codecs");
return res;
}
res = mJsepSession->AddTrack(
new JsepTrack(type, streamId, trackId, sdp::kSend));
if (NS_FAILED(res)) {
std::string errorString = mJsepSession->GetLastError();
- CSFLogError(logTag, "%s (%s) : pc = %s, error = %s",
+ CSFLogError(LOGTAG, "%s (%s) : pc = %s, error = %s",
__FUNCTION__,
type == SdpMediaSection::kAudio ? "audio" : "video",
mHandle.c_str(),
errorString.c_str());
return NS_ERROR_FAILURE;
}
return NS_OK;
}
nsresult
PeerConnectionImpl::InitializeDataChannel()
{
PC_AUTO_ENTER_API_CALL(false);
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
uint32_t channels = 0;
uint16_t localport = 0;
uint16_t remoteport = 0;
uint32_t remotemaxmessagesize = 0;
bool mmsset = false;
uint16_t level = 0;
nsresult rv = GetDatachannelParameters(&channels, &localport, &remoteport,
&remotemaxmessagesize, &mmsset, &level);
if (NS_FAILED(rv)) {
- CSFLogDebug(logTag, "%s: We did not negotiate datachannel", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s: We did not negotiate datachannel", __FUNCTION__);
return NS_OK;
}
if (channels > MAX_NUM_STREAMS) {
channels = MAX_NUM_STREAMS;
}
rv = EnsureDataConnection(localport, channels, remotemaxmessagesize, mmsset);
if (NS_SUCCEEDED(rv)) {
// use the specified TransportFlow
RefPtr<TransportFlow> flow = mMedia->GetTransportFlow(level, false).get();
- CSFLogDebug(logTag, "Transportflow[%u] = %p",
+ CSFLogDebug(LOGTAG, "Transportflow[%u] = %p",
static_cast<unsigned>(level), flow.get());
if (flow) {
if (mDataConnection->ConnectViaTransportFlow(flow,
localport,
remoteport)) {
return NS_OK;
}
}
@@ -1342,17 +1347,17 @@ PeerConnectionImpl::CreateDataChannel(co
NS_ConvertUTF16toUTF8(aLabel), NS_ConvertUTF16toUTF8(aProtocol), theType,
ordered,
aType == DataChannelConnection::PARTIAL_RELIABLE_REXMIT ? aMaxNum :
(aType == DataChannelConnection::PARTIAL_RELIABLE_TIMED ? aMaxTime : 0),
nullptr, nullptr, aExternalNegotiated, aStream
);
NS_ENSURE_TRUE(dataChannel,NS_ERROR_FAILURE);
- CSFLogDebug(logTag, "%s: making DOMDataChannel", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s: making DOMDataChannel", __FUNCTION__);
if (!mHaveDataStream) {
std::string streamId;
std::string trackId;
// Generate random ids because these aren't linked to any local streams.
if (!mUuidGen->Generate(&streamId)) {
@@ -1365,17 +1370,17 @@ PeerConnectionImpl::CreateDataChannel(co
RefPtr<JsepTrack> track(new JsepTrack(
mozilla::SdpMediaSection::kApplication,
streamId,
trackId,
sdp::kSend));
rv = mJsepSession->AddTrack(track);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Failed to add application track.",
+ CSFLogError(LOGTAG, "%s: Failed to add application track.",
__FUNCTION__);
return rv;
}
mHaveDataStream = true;
OnNegotiationNeeded();
}
nsIDOMDataChannel *retval;
rv = NS_NewDOMDataChannel(dataChannel.forget(), mWindow, &retval);
@@ -1431,17 +1436,17 @@ PeerConnectionImpl::NotifyDataChannel(al
// XXXkhuey this is completely fucked up. We can't use RefPtr<DataChannel>
// here because DataChannel's AddRef/Release are non-virtual and not visible
// if !MOZILLA_INTERNAL_API, but this function leaks the DataChannel if
// !MOZILLA_INTERNAL_API because it never transfers the ref to
// NS_NewDOMDataChannel.
DataChannel* channel = aChannel.take();
MOZ_ASSERT(channel);
- CSFLogDebug(logTag, "%s: channel: %p", __FUNCTION__, channel);
+ CSFLogDebug(LOGTAG, "%s: channel: %p", __FUNCTION__, channel);
nsCOMPtr<nsIDOMDataChannel> domchannel;
nsresult rv = NS_NewDOMDataChannel(already_AddRefed<DataChannel>(channel),
mWindow, getter_AddRefs(domchannel));
NS_ENSURE_SUCCESS_VOID(rv);
mHaveDataStream = true;
@@ -1510,17 +1515,17 @@ PeerConnectionImpl::CreateOffer(const Js
if (!PeerConnectionCtx::GetInstance()->isReady()) {
// Uh oh. We're not ready yet. Enqueue this operation.
PeerConnectionCtx::GetInstance()->queueJSEPOperation(
WrapRunnableNM(DeferredCreateOffer, mHandle, aOptions));
STAMP_TIMECARD(mTimeCard, "Deferring CreateOffer (not ready)");
return NS_OK;
}
- CSFLogDebug(logTag, "CreateOffer()");
+ CSFLogDebug(LOGTAG, "CreateOffer()");
nsresult nrv;
if (restartIce &&
!mJsepSession->GetLocalDescription(kJsepDescriptionCurrent).empty()) {
// If restart is requested and a restart is already in progress, we
// need to make room for the restart request so we either rollback
// or finalize to "clear" the previous restart.
if (mMedia->GetIceRestartState() ==
@@ -1529,29 +1534,29 @@ PeerConnectionImpl::CreateOffer(const Js
RollbackIceRestart();
} else if (mMedia->GetIceRestartState() ==
PeerConnectionMedia::ICE_RESTART_COMMITTED) {
// we're mid-restart and can't rollback, finalize restart even
// though we're not really ready yet
FinalizeIceRestart();
}
- CSFLogInfo(logTag, "Offerer restarting ice");
+ CSFLogInfo(LOGTAG, "Offerer restarting ice");
nrv = SetupIceRestart();
if (NS_FAILED(nrv)) {
- CSFLogError(logTag, "%s: SetupIceRestart failed, res=%u",
+ CSFLogError(LOGTAG, "%s: SetupIceRestart failed, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
}
}
nrv = ConfigureJsepSessionCodecs();
if (NS_FAILED(nrv)) {
- CSFLogError(logTag, "Failed to configure codecs");
+ CSFLogError(LOGTAG, "Failed to configure codecs");
return nrv;
}
STAMP_TIMECARD(mTimeCard, "Create Offer");
std::string offer;
nrv = mJsepSession->CreateOffer(aOptions, &offer);
@@ -1562,17 +1567,17 @@ PeerConnectionImpl::CreateOffer(const Js
case NS_ERROR_UNEXPECTED:
error = kInvalidState;
break;
default:
error = kInternalError;
}
std::string errorString = mJsepSession->GetLastError();
- CSFLogError(logTag, "%s: pc = %s, error = %s",
+ CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnCreateOfferError(error, ObString(errorString.c_str()), rv);
} else {
pco->OnCreateOfferSuccess(ObString(offer.c_str()), rv);
}
UpdateSignalingState();
return NS_OK;
@@ -1583,28 +1588,28 @@ PeerConnectionImpl::CreateAnswer()
{
PC_AUTO_ENTER_API_CALL(true);
RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(mPCObserver);
if (!pco) {
return NS_OK;
}
- CSFLogDebug(logTag, "CreateAnswer()");
+ CSFLogDebug(LOGTAG, "CreateAnswer()");
nsresult nrv;
if (mJsepSession->RemoteIceIsRestarting()) {
if (mMedia->GetIceRestartState() ==
PeerConnectionMedia::ICE_RESTART_COMMITTED) {
FinalizeIceRestart();
} else if (!mMedia->IsIceRestarting()) {
- CSFLogInfo(logTag, "Answerer restarting ice");
+ CSFLogInfo(LOGTAG, "Answerer restarting ice");
nrv = SetupIceRestart();
if (NS_FAILED(nrv)) {
- CSFLogError(logTag, "%s: SetupIceRestart failed, res=%u",
+ CSFLogError(LOGTAG, "%s: SetupIceRestart failed, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
}
}
}
STAMP_TIMECARD(mTimeCard, "Create Answer");
@@ -1621,71 +1626,71 @@ PeerConnectionImpl::CreateAnswer()
case NS_ERROR_UNEXPECTED:
error = kInvalidState;
break;
default:
error = kInternalError;
}
std::string errorString = mJsepSession->GetLastError();
- CSFLogError(logTag, "%s: pc = %s, error = %s",
+ CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnCreateAnswerError(error, ObString(errorString.c_str()), rv);
} else {
pco->OnCreateAnswerSuccess(ObString(answer.c_str()), rv);
}
UpdateSignalingState();
return NS_OK;
}
nsresult
PeerConnectionImpl::SetupIceRestart()
{
if (mMedia->IsIceRestarting()) {
- CSFLogError(logTag, "%s: ICE already restarting",
+ CSFLogError(LOGTAG, "%s: ICE already restarting",
__FUNCTION__);
return NS_ERROR_UNEXPECTED;
}
std::string ufrag = mMedia->ice_ctx()->GetNewUfrag();
std::string pwd = mMedia->ice_ctx()->GetNewPwd();
if (ufrag.empty() || pwd.empty()) {
- CSFLogError(logTag, "%s: Bad ICE credentials (ufrag:'%s'/pwd:'%s')",
+ CSFLogError(LOGTAG, "%s: Bad ICE credentials (ufrag:'%s'/pwd:'%s')",
__FUNCTION__,
ufrag.c_str(), pwd.c_str());
return NS_ERROR_UNEXPECTED;
}
// hold on to the current ice creds in case of rollback
mPreviousIceUfrag = mJsepSession->GetUfrag();
mPreviousIcePwd = mJsepSession->GetPwd();
mMedia->BeginIceRestart(ufrag, pwd);
nsresult nrv = mJsepSession->SetIceCredentials(ufrag, pwd);
if (NS_FAILED(nrv)) {
- CSFLogError(logTag, "%s: Couldn't set ICE credentials, res=%u",
+ CSFLogError(LOGTAG, "%s: Couldn't set ICE credentials, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
}
return NS_OK;
}
nsresult
PeerConnectionImpl::RollbackIceRestart()
{
mMedia->RollbackIceRestart();
// put back the previous ice creds
nsresult nrv = mJsepSession->SetIceCredentials(mPreviousIceUfrag,
mPreviousIcePwd);
if (NS_FAILED(nrv)) {
- CSFLogError(logTag, "%s: Couldn't set ICE credentials, res=%u",
+ CSFLogError(LOGTAG, "%s: Couldn't set ICE credentials, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
}
mPreviousIceUfrag = "";
mPreviousIcePwd = "";
return NS_OK;
@@ -1701,17 +1706,17 @@ PeerConnectionImpl::FinalizeIceRestart()
}
NS_IMETHODIMP
PeerConnectionImpl::SetLocalDescription(int32_t aAction, const char* aSDP)
{
PC_AUTO_ENTER_API_CALL(true);
if (!aSDP) {
- CSFLogError(logTag, "%s - aSDP is NULL", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s - aSDP is NULL", __FUNCTION__);
return NS_ERROR_FAILURE;
}
JSErrorResult rv;
RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(mPCObserver);
if (!pco) {
return NS_OK;
}
@@ -1753,17 +1758,17 @@ PeerConnectionImpl::SetLocalDescription(
case NS_ERROR_UNEXPECTED:
error = kInvalidState;
break;
default:
error = kInternalError;
}
std::string errorString = mJsepSession->GetLastError();
- CSFLogError(logTag, "%s: pc = %s, error = %s",
+ CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnSetLocalDescriptionError(error, ObString(errorString.c_str()), rv);
} else {
pco->OnSetLocalDescriptionSuccess(rv);
}
UpdateSignalingState(sdpType == mozilla::kJsepSdpRollback);
return NS_OK;
@@ -1801,17 +1806,17 @@ static void StartTrack(MediaStream* aSou
StreamTime current_end = mStream->GetTracksEnd();
TrackTicks current_ticks =
mStream->TimeToTicksRoundUp(track_rate, current_end);
// Add a track 'now' to avoid possible underrun, especially if we add
// a track "later".
if (current_end != 0L) {
- CSFLogDebug(logTag, "added track @ %u -> %f",
+ CSFLogDebug(LOGTAG, "added track @ %u -> %f",
static_cast<unsigned>(current_end),
mStream->StreamTimeToSeconds(current_end));
}
// To avoid assertions, we need to insert a dummy segment that covers up
// to the "start" time for the track
segment_->AppendNullData(current_ticks);
if (segment_->GetType() == MediaSegment::AUDIO) {
@@ -1826,17 +1831,17 @@ static void StartTrack(MediaStream* aSou
}
private:
TrackID track_id_;
nsAutoPtr<MediaSegment> segment_;
};
aSource->GraphImpl()->AppendMessage(
MakeUnique<Message>(aSource, aTrackId, Move(aSegment)));
- CSFLogInfo(logTag, "Dispatched track-add for track id %u on stream %p",
+ CSFLogInfo(LOGTAG, "Dispatched track-add for track id %u on stream %p",
aTrackId, aSource);
}
nsresult
PeerConnectionImpl::CreateNewRemoteTracks(RefPtr<PeerConnectionObserver>& aPco)
{
JSErrorResult jrv;
@@ -1877,17 +1882,17 @@ PeerConnectionImpl::CreateNewRemoteTrack
if (NS_FAILED(nrv)) {
aPco->OnSetRemoteDescriptionError(
kInternalError,
ObString("AddRemoteStream failed"),
jrv);
return nrv;
}
- CSFLogDebug(logTag, "Added remote stream %s", info->GetId().c_str());
+ CSFLogDebug(LOGTAG, "Added remote stream %s", info->GetId().c_str());
info->GetMediaStream()->AssignId(NS_ConvertUTF8toUTF16(streamId.c_str()));
info->GetMediaStream()->SetLogicalStreamStartTime(
info->GetMediaStream()->GetPlaybackStream()->GetCurrentTime());
}
Sequence<OwningNonNull<DOMMediaStream>> streams;
if (!streams.AppendElement(OwningNonNull<DOMMediaStream>(
@@ -1943,33 +1948,33 @@ PeerConnectionImpl::CreateNewRemoteTrack
source);
info->GetMediaStream()->AddTrackInternal(domTrack);
segment = new VideoSegment;
}
StartTrack(info->GetMediaStream()->GetInputStream()->AsSourceStream(),
trackID, Move(segment));
info->AddTrack(webrtcTrackId, domTrack);
- CSFLogDebug(logTag, "Added remote track %s/%s",
+ CSFLogDebug(LOGTAG, "Added remote track %s/%s",
info->GetId().c_str(), webrtcTrackId.c_str());
domTrack->AssignId(NS_ConvertUTF8toUTF16(webrtcTrackId.c_str()));
aPco->OnAddTrack(*domTrack, streams, jrv);
if (jrv.Failed()) {
- CSFLogError(logTag, ": OnAddTrack(%s) failed! Error: %u",
+ CSFLogError(LOGTAG, ": OnAddTrack(%s) failed! Error: %u",
webrtcTrackId.c_str(),
jrv.ErrorCodeAsInt());
}
}
}
if (newStream) {
aPco->OnAddStream(*info->GetMediaStream(), jrv);
if (jrv.Failed()) {
- CSFLogError(logTag, ": OnAddStream() failed! Error: %u",
+ CSFLogError(LOGTAG, ": OnAddStream() failed! Error: %u",
jrv.ErrorCodeAsInt());
}
}
}
return NS_OK;
}
void
@@ -2016,17 +2021,17 @@ PeerConnectionImpl::RemoveOldRemoteTrack
}
NS_IMETHODIMP
PeerConnectionImpl::SetRemoteDescription(int32_t action, const char* aSDP)
{
PC_AUTO_ENTER_API_CALL(true);
if (!aSDP) {
- CSFLogError(logTag, "%s - aSDP is NULL", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s - aSDP is NULL", __FUNCTION__);
return NS_ERROR_FAILURE;
}
JSErrorResult jrv;
RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(mPCObserver);
if (!pco) {
return NS_OK;
}
@@ -2041,17 +2046,17 @@ PeerConnectionImpl::SetRemoteDescription
action,
std::string(aSDP)));
STAMP_TIMECARD(mTimeCard, "Deferring SetRemote (not ready)");
return NS_OK;
}
nsresult nrv = ConfigureJsepSessionCodecs();
if (NS_FAILED(nrv)) {
- CSFLogError(logTag, "Failed to configure codecs");
+ CSFLogError(LOGTAG, "Failed to configure codecs");
return nrv;
}
}
STAMP_TIMECARD(mTimeCard, "Set Remote Description");
mRemoteRequestedSDP = aSDP;
JsepSdpType sdpType;
@@ -2084,17 +2089,17 @@ PeerConnectionImpl::SetRemoteDescription
case NS_ERROR_UNEXPECTED:
error = kInvalidState;
break;
default:
error = kInternalError;
}
std::string errorString = mJsepSession->GetLastError();
- CSFLogError(logTag, "%s: pc = %s, error = %s",
+ CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnSetRemoteDescriptionError(error, ObString(errorString.c_str()), jrv);
} else {
nrv = CreateNewRemoteTracks(pco);
if (NS_FAILED(nrv)) {
// aPco was already notified, just return early.
return NS_OK;
}
@@ -2161,29 +2166,29 @@ PeerConnectionImpl::GetStats(MediaStream
return NS_OK;
}
NS_IMETHODIMP
PeerConnectionImpl::AddIceCandidate(const char* aCandidate, const char* aMid, unsigned short aLevel) {
PC_AUTO_ENTER_API_CALL(true);
if (mForceIceTcp && std::string::npos != std::string(aCandidate).find(" UDP ")) {
- CSFLogError(logTag, "Blocking remote UDP candidate: %s", aCandidate);
+ CSFLogError(LOGTAG, "Blocking remote UDP candidate: %s", aCandidate);
return NS_OK;
}
JSErrorResult rv;
RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(mPCObserver);
if (!pco) {
return NS_OK;
}
STAMP_TIMECARD(mTimeCard, "Add Ice Candidate");
- CSFLogDebug(logTag, "AddIceCandidate: %s", aCandidate);
+ CSFLogDebug(LOGTAG, "AddIceCandidate: %s", aCandidate);
// When remote candidates are added before our ICE ctx is up and running
// (the transition to New is async through STS, so this is not impossible),
// we won't record them as trickle candidates. Is this what we want?
if(!mIceStartTime.IsNull()) {
TimeDuration timeDelta = TimeStamp::Now() - mIceStartTime;
if (mIceConnectionState == PCImplIceConnectionState::Failed) {
Telemetry::Accumulate(Telemetry::WEBRTC_ICE_LATE_TRICKLE_ARRIVAL_TIME,
@@ -2215,17 +2220,17 @@ PeerConnectionImpl::AddIceCandidate(cons
error = kInvalidCandidate;
break;
default:
error = kInternalError;
}
std::string errorString = mJsepSession->GetLastError();
- CSFLogError(logTag, "Failed to incorporate remote candidate into SDP:"
+ CSFLogError(LOGTAG, "Failed to incorporate remote candidate into SDP:"
" res = %u, candidate = %s, level = %u, error = %s",
static_cast<unsigned>(res),
aCandidate,
static_cast<unsigned>(aLevel),
errorString.c_str());
pco->OnAddIceCandidateError(error, ObString(errorString.c_str()), rv);
}
@@ -2258,17 +2263,17 @@ PeerConnectionImpl::SetPeerIdentity(cons
if (mPeerIdentity) {
if (!mPeerIdentity->Equals(aPeerIdentity)) {
return NS_ERROR_FAILURE;
}
} else {
mPeerIdentity = new PeerIdentity(aPeerIdentity);
nsIDocument* doc = GetWindow()->GetExtantDoc();
if (!doc) {
- CSFLogInfo(logTag, "Can't update principal on streams; document gone");
+ CSFLogInfo(LOGTAG, "Can't update principal on streams; document gone");
return NS_ERROR_FAILURE;
}
MediaStreamTrack* allTracks = nullptr;
mMedia->UpdateSinkIdentity_m(allTracks, doc->NodePrincipal(), mPeerIdentity);
}
return NS_OK;
}
@@ -2280,33 +2285,33 @@ PeerConnectionImpl::SetDtlsConnected(boo
// For this, as with mPrivacyRequested, once we've connected to a peer, we
// fixate on that peer. Dealing with multiple peers or connections is more
// than this run-down wreck of an object can handle.
// Besides, this is only used to say if we have been connected ever.
if (!mPrivacyRequested && !aPrivacyRequested && !mDtlsConnected) {
// now we know that privacy isn't needed for sure
nsIDocument* doc = GetWindow()->GetExtantDoc();
if (!doc) {
- CSFLogInfo(logTag, "Can't update principal on streams; document gone");
+ CSFLogInfo(LOGTAG, "Can't update principal on streams; document gone");
return NS_ERROR_FAILURE;
}
mMedia->UpdateRemoteStreamPrincipals_m(doc->NodePrincipal());
}
mDtlsConnected = true;
mPrivacyRequested = mPrivacyRequested || aPrivacyRequested;
return NS_OK;
}
void
PeerConnectionImpl::PrincipalChanged(MediaStreamTrack* aTrack) {
nsIDocument* doc = GetWindow()->GetExtantDoc();
if (doc) {
mMedia->UpdateSinkIdentity_m(aTrack, doc->NodePrincipal(), mPeerIdentity);
} else {
- CSFLogInfo(logTag, "Can't update sink principal; document gone");
+ CSFLogInfo(LOGTAG, "Can't update sink principal; document gone");
}
}
std::string
PeerConnectionImpl::GetTrackId(const MediaStreamTrack& aTrack)
{
nsString wideTrackId;
aTrack.GetId(wideTrackId);
@@ -2319,28 +2324,28 @@ PeerConnectionImpl::GetStreamId(const DO
nsString wideStreamId;
aStream.GetId(wideStreamId);
return NS_ConvertUTF16toUTF8(wideStreamId).get();
}
void
PeerConnectionImpl::OnMediaError(const std::string& aError)
{
- CSFLogError(logTag, "Encountered media error! %s", aError.c_str());
+ CSFLogError(LOGTAG, "Encountered media error! %s", aError.c_str());
// TODO: Let content know about this somehow.
}
nsresult
PeerConnectionImpl::AddTrack(MediaStreamTrack& aTrack,
const Sequence<OwningNonNull<DOMMediaStream>>& aStreams)
{
PC_AUTO_ENTER_API_CALL(true);
if (!aStreams.Length()) {
- CSFLogError(logTag, "%s: At least one stream arg required", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: At least one stream arg required", __FUNCTION__);
return NS_ERROR_FAILURE;
}
return AddTrack(aTrack, aStreams[0]);
}
nsresult
PeerConnectionImpl::AddTrack(MediaStreamTrack& aTrack,
@@ -2348,17 +2353,17 @@ PeerConnectionImpl::AddTrack(MediaStream
{
std::string streamId = PeerConnectionImpl::GetStreamId(aMediaStream);
std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
nsresult res = mMedia->AddTrack(aMediaStream, streamId, aTrack, trackId);
if (NS_FAILED(res)) {
return res;
}
- CSFLogDebug(logTag, "Added track (%s) to stream %s",
+ CSFLogDebug(LOGTAG, "Added track (%s) to stream %s",
trackId.c_str(), streamId.c_str());
aTrack.AddPrincipalChangeObserver(this);
PrincipalChanged(&aTrack);
if (aTrack.AsAudioStreamTrack()) {
res = AddTrackToJsepSession(SdpMediaSection::kAudio, streamId, trackId);
if (NS_FAILED(res)) {
@@ -2436,25 +2441,25 @@ PeerConnectionImpl::RemoveTrack(MediaStr
mDTMFStates.RemoveElementAt(i);
break;
}
}
RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
if (!info) {
- CSFLogError(logTag, "%s: Unknown stream", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
return NS_ERROR_INVALID_ARG;
}
nsresult rv =
mJsepSession->RemoveTrack(info->GetId(), trackId);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Unknown stream/track ids %s %s",
+ CSFLogError(LOGTAG, "%s: Unknown stream/track ids %s %s",
__FUNCTION__,
info->GetId().c_str(),
trackId.c_str());
return rv;
}
media()->RemoveLocalTrack(info->GetId(), trackId);
@@ -2593,100 +2598,100 @@ PeerConnectionImpl::ReplaceTrack(MediaSt
if (!pco) {
return NS_ERROR_UNEXPECTED;
}
JSErrorResult jrv;
if (&aThisTrack == &aWithTrack) {
pco->OnReplaceTrackSuccess(jrv);
if (jrv.Failed()) {
- CSFLogError(logTag, "Error firing replaceTrack success callback");
+ CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
}
nsString thisKind;
aThisTrack.GetKind(thisKind);
nsString withKind;
aWithTrack.GetKind(withKind);
if (thisKind != withKind) {
pco->OnReplaceTrackError(kIncompatibleMediaStreamTrack,
ObString(mJsepSession->GetLastError().c_str()),
jrv);
if (jrv.Failed()) {
- CSFLogError(logTag, "Error firing replaceTrack success callback");
+ CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
}
std::string origTrackId = PeerConnectionImpl::GetTrackId(aThisTrack);
std::string newTrackId = PeerConnectionImpl::GetTrackId(aWithTrack);
RefPtr<LocalSourceStreamInfo> info =
media()->GetLocalStreamByTrackId(origTrackId);
if (!info) {
- CSFLogError(logTag, "Could not find stream from trackId");
+ CSFLogError(LOGTAG, "Could not find stream from trackId");
return NS_ERROR_UNEXPECTED;
}
std::string origStreamId = info->GetId();
std::string newStreamId =
PeerConnectionImpl::GetStreamId(*aWithTrack.mOwningStream);
nsresult rv = mJsepSession->ReplaceTrack(origStreamId,
origTrackId,
newStreamId,
newTrackId);
if (NS_FAILED(rv)) {
pco->OnReplaceTrackError(kInvalidMediastreamTrack,
ObString(mJsepSession->GetLastError().c_str()),
jrv);
if (jrv.Failed()) {
- CSFLogError(logTag, "Error firing replaceTrack error callback");
+ CSFLogError(LOGTAG, "Error firing replaceTrack error callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
}
rv = media()->ReplaceTrack(origStreamId,
origTrackId,
aWithTrack,
newStreamId,
newTrackId);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "Unexpected error in ReplaceTrack: %d",
+ CSFLogError(LOGTAG, "Unexpected error in ReplaceTrack: %d",
static_cast<int>(rv));
pco->OnReplaceTrackError(kInvalidMediastreamTrack,
ObString("Failed to replace track"),
jrv);
if (jrv.Failed()) {
- CSFLogError(logTag, "Error firing replaceTrack error callback");
+ CSFLogError(LOGTAG, "Error firing replaceTrack error callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
}
aThisTrack.RemovePrincipalChangeObserver(this);
aWithTrack.AddPrincipalChangeObserver(this);
PrincipalChanged(&aWithTrack);
// We update the media pipelines here so we can apply different codec
// settings for different sources (e.g. screensharing as opposed to camera.)
// TODO: We should probably only do this if the source has in fact changed.
if (NS_FAILED((rv = mMedia->UpdateMediaPipelines(*mJsepSession)))) {
- CSFLogError(logTag, "Error Updating MediaPipelines");
+ CSFLogError(LOGTAG, "Error Updating MediaPipelines");
return rv;
}
pco->OnReplaceTrackSuccess(jrv);
if (jrv.Failed()) {
- CSFLogError(logTag, "Error firing replaceTrack success callback");
+ CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
}
NS_IMETHODIMP
PeerConnectionImpl::SetParameters(MediaStreamTrack& aTrack,
@@ -2713,17 +2718,17 @@ PeerConnectionImpl::SetParameters(MediaS
nsresult
PeerConnectionImpl::SetParameters(
MediaStreamTrack& aTrack,
const std::vector<JsepTrack::JsConstraints>& aConstraints)
{
std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
if (!info) {
- CSFLogError(logTag, "%s: Unknown stream", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
return NS_ERROR_INVALID_ARG;
}
std::string streamId = info->GetId();
return mJsepSession->SetParameters(streamId, trackId, aConstraints);
}
NS_IMETHODIMP
@@ -2750,17 +2755,17 @@ PeerConnectionImpl::GetParameters(MediaS
nsresult
PeerConnectionImpl::GetParameters(
MediaStreamTrack& aTrack,
std::vector<JsepTrack::JsConstraints>* aOutConstraints)
{
std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
if (!info) {
- CSFLogError(logTag, "%s: Unknown stream", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
return NS_ERROR_INVALID_ARG;
}
std::string streamId = info->GetId();
return mJsepSession->GetParameters(streamId, trackId, aOutConstraints);
}
nsresult
@@ -2771,17 +2776,17 @@ PeerConnectionImpl::CalculateFingerprint
size_t len = 0;
MOZ_ASSERT(fingerprint);
const UniqueCERTCertificate& cert = mCertificate->Certificate();
nsresult rv = DtlsIdentity::ComputeFingerprint(cert, algorithm,
&buf[0], sizeof(buf),
&len);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "Unable to calculate certificate fingerprint, rv=%u",
+ CSFLogError(LOGTAG, "Unable to calculate certificate fingerprint, rv=%u",
static_cast<unsigned>(rv));
return rv;
}
MOZ_ASSERT(len > 0 && len <= DtlsIdentity::HASH_ALGORITHM_MAX_LENGTH);
fingerprint->assign(buf, buf + len);
return NS_OK;
}
@@ -2907,30 +2912,30 @@ PeerConnectionImpl::IceGatheringState(PC
nsresult
PeerConnectionImpl::CheckApiState(bool assert_ice_ready) const
{
PC_AUTO_ENTER_API_CALL_NO_CHECK();
MOZ_ASSERT(mTrickle || !assert_ice_ready ||
(mIceGatheringState == PCImplIceGatheringState::Complete));
if (IsClosed()) {
- CSFLogError(logTag, "%s: called API while closed", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: called API while closed", __FUNCTION__);
return NS_ERROR_FAILURE;
}
if (!mMedia) {
- CSFLogError(logTag, "%s: called API with disposed mMedia", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: called API with disposed mMedia", __FUNCTION__);
return NS_ERROR_FAILURE;
}
return NS_OK;
}
NS_IMETHODIMP
PeerConnectionImpl::Close()
{
- CSFLogDebug(logTag, "%s: for %s", __FUNCTION__, mHandle.c_str());
+ CSFLogDebug(LOGTAG, "%s: for %s", __FUNCTION__, mHandle.c_str());
PC_AUTO_ENTER_API_CALL_NO_CHECK();
SetSignalingState_m(PCImplSignalingState::SignalingClosed);
return NS_OK;
}
bool
@@ -2938,17 +2943,17 @@ PeerConnectionImpl::PluginCrash(uint32_t
const nsAString& aPluginName)
{
// fire an event to the DOM window if this is "ours"
bool result = mMedia ? mMedia->AnyCodecHasPluginID(aPluginID) : false;
if (!result) {
return false;
}
- CSFLogError(logTag, "%s: Our plugin %llu crashed", __FUNCTION__, static_cast<unsigned long long>(aPluginID));
+ CSFLogError(LOGTAG, "%s: Our plugin %llu crashed", __FUNCTION__, static_cast<unsigned long long>(aPluginID));
nsCOMPtr<nsIDocument> doc = mWindow->GetExtantDoc();
if (!doc) {
NS_WARNING("Couldn't get document for PluginCrashed event!");
return true;
}
PluginCrashedEventInit init;
@@ -3034,23 +3039,23 @@ PeerConnectionImpl::CloseInt()
// We do this at the end of the call because we want to make sure we've waited
// for all trickle ICE candidates to come in; this can happen well after we've
// transitioned to connected. As a bonus, this allows us to detect race
// conditions where a stats dispatch happens right as the PC closes.
if (!mPrivateWindow) {
RecordLongtermICEStatistics();
}
RecordEndOfCallTelemetry();
- CSFLogInfo(logTag, "%s: Closing PeerConnectionImpl %s; "
+ CSFLogInfo(LOGTAG, "%s: Closing PeerConnectionImpl %s; "
"ending call", __FUNCTION__, mHandle.c_str());
if (mJsepSession) {
mJsepSession->Close();
}
if (mDataConnection) {
- CSFLogInfo(logTag, "%s: Destroying DataChannelConnection %p for %s",
+ CSFLogInfo(LOGTAG, "%s: Destroying DataChannelConnection %p for %s",
__FUNCTION__, (void *) mDataConnection.get(), mHandle.c_str());
mDataConnection->Destroy();
mDataConnection = nullptr; // it may not go away until the runnables are dead
}
ShutdownMedia();
// DataConnection will need to stay alive until all threads/runnables exit
@@ -3118,26 +3123,26 @@ PeerConnectionImpl::SetSignalingState_m(
// Either negotiation is done, or we've rolled back. In either case, we
// need to re-evaluate whether further negotiation is required.
mNegotiationNeeded = false;
// If we're rolling back a local offer, we might need to remove some
// transports, but nothing further needs to be done.
mMedia->ActivateOrRemoveTransports(*mJsepSession, mForceIceTcp);
if (!rollback) {
if (NS_FAILED(mMedia->UpdateMediaPipelines(*mJsepSession))) {
- CSFLogError(logTag, "Error Updating MediaPipelines");
+ CSFLogError(LOGTAG, "Error Updating MediaPipelines");
NS_ASSERTION(false, "Error Updating MediaPipelines in SetSignalingState_m()");
// XXX what now? Not much we can do but keep going, without major restructuring
}
InitializeDataChannel();
mMedia->StartIceChecks(*mJsepSession);
}
if (!mJsepSession->AllLocalTracksAreAssigned()) {
- CSFLogInfo(logTag, "Not all local tracks were assigned to an "
+ CSFLogInfo(LOGTAG, "Not all local tracks were assigned to an "
"m-section, either because the offerer did not offer"
" to receive enough tracks, or because tracks were "
"added after CreateOffer/Answer, but before "
"offer/answer completed. This requires "
"renegotiation.");
fireNegotiationNeeded = true;
}
@@ -3290,50 +3295,50 @@ toDomIceGatheringState(NrIceCtx::Gatheri
}
void
PeerConnectionImpl::CandidateReady(const std::string& candidate,
uint16_t level) {
PC_AUTO_ENTER_API_CALL_VOID_RETURN(false);
if (mForceIceTcp && std::string::npos != candidate.find(" UDP ")) {
- CSFLogError(logTag, "Blocking local UDP candidate: %s", candidate.c_str());
+ CSFLogError(LOGTAG, "Blocking local UDP candidate: %s", candidate.c_str());
return;
}
std::string mid;
bool skipped = false;
nsresult res = mJsepSession->AddLocalIceCandidate(candidate,
level,
&mid,
&skipped);
if (NS_FAILED(res)) {
std::string errorString = mJsepSession->GetLastError();
- CSFLogError(logTag, "Failed to incorporate local candidate into SDP:"
+ CSFLogError(LOGTAG, "Failed to incorporate local candidate into SDP:"
" res = %u, candidate = %s, level = %u, error = %s",
static_cast<unsigned>(res),
candidate.c_str(),
static_cast<unsigned>(level),
errorString.c_str());
return;
}
if (skipped) {
- CSFLogDebug(logTag, "Skipped adding local candidate %s (level %u) to SDP, "
+ CSFLogDebug(LOGTAG, "Skipped adding local candidate %s (level %u) to SDP, "
"this typically happens because the m-section is "
"bundled, which means it doesn't make sense for it to "
"have its own transport-related attributes.",
candidate.c_str(),
static_cast<unsigned>(level));
return;
}
- CSFLogDebug(logTag, "Passing local candidate to content: %s",
+ CSFLogDebug(LOGTAG, "Passing local candidate to content: %s",
candidate.c_str());
SendLocalIceCandidateToContent(level, mid, candidate);
}
static void
SendLocalIceCandidateToContentImpl(nsWeakPtr weakPCObserver,
uint16_t level,
const std::string& mid,
@@ -3381,17 +3386,17 @@ static bool isFailed(PCImplIceConnection
return state == PCImplIceConnectionState::Failed;
}
void PeerConnectionImpl::IceConnectionStateChange(
NrIceCtx* ctx,
NrIceCtx::ConnectionState state) {
PC_AUTO_ENTER_API_CALL_VOID_RETURN(false);
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
auto domState = toDomIceConnectionState(state);
if (domState == mIceConnectionState) {
// no work to be done since the states are the same.
// this can happen during ICE rollback situations.
return;
}
@@ -3468,17 +3473,17 @@ void PeerConnectionImpl::IceConnectionSt
void
PeerConnectionImpl::IceGatheringStateChange(
NrIceCtx* ctx,
NrIceCtx::GatheringState state)
{
PC_AUTO_ENTER_API_CALL_VOID_RETURN(false);
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mIceGatheringState = toDomIceGatheringState(state);
// Would be nice if we had a means of converting one of these dom enums
// to a string that wasn't almost as much text as this switch statement...
switch (mIceGatheringState) {
case PCImplIceGatheringState::New:
STAMP_TIMECARD(mTimeCard, "Ice gathering state: new");
@@ -3511,57 +3516,57 @@ PeerConnectionImpl::IceGatheringStateCha
}
void
PeerConnectionImpl::UpdateDefaultCandidate(const std::string& defaultAddr,
uint16_t defaultPort,
const std::string& defaultRtcpAddr,
uint16_t defaultRtcpPort,
uint16_t level) {
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mJsepSession->UpdateDefaultCandidate(defaultAddr,
defaultPort,
defaultRtcpAddr,
defaultRtcpPort,
level);
}
void
PeerConnectionImpl::EndOfLocalCandidates(uint16_t level) {
- CSFLogDebug(logTag, "%s", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mJsepSession->EndOfLocalCandidates(level);
}
nsresult
PeerConnectionImpl::BuildStatsQuery_m(
mozilla::dom::MediaStreamTrack *aSelector,
RTCStatsQuery *query) {
if (!HasMedia()) {
return NS_ERROR_UNEXPECTED;
}
if (!mThread) {
- CSFLogError(logTag, "Could not build stats query, no MainThread");
+ CSFLogError(LOGTAG, "Could not build stats query, no MainThread");
return NS_ERROR_UNEXPECTED;
}
nsresult rv = GetTimeSinceEpoch(&(query->now));
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "Could not build stats query, could not get timestamp");
+ CSFLogError(LOGTAG, "Could not build stats query, could not get timestamp");
return rv;
}
// Note: mMedia->ice_ctx() is deleted on STS thread; so make sure we grab and hold
// a ref instead of making multiple calls. NrIceCtx uses threadsafe refcounting.
// NOTE: Do this after all other failure tests, to ensure we don't
// accidentally release the Ctx on Mainthread.
query->iceCtx = mMedia->ice_ctx();
if (!query->iceCtx) {
- CSFLogError(logTag, "Could not build stats query, no ice_ctx");
+ CSFLogError(LOGTAG, "Could not build stats query, no ice_ctx");
return NS_ERROR_UNEXPECTED;
}
// We do not use the pcHandle here, since that's risky to expose to content.
query->report = new RTCStatsReportInternalConstruct(
NS_ConvertASCIItoUTF16(mName.c_str()),
query->now);
@@ -3650,17 +3655,17 @@ static void RecordIceStats_s(
DOMHighResTimeStamp now,
RTCStatsReportInternal* report) {
NS_ConvertASCIItoUTF16 transportId(mediaStream.name().c_str());
std::vector<NrIceCandidatePair> candPairs;
nsresult res = mediaStream.GetCandidatePairs(&candPairs);
if (NS_FAILED(res)) {
- CSFLogError(logTag, "%s: Error getting candidate pairs", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Error getting candidate pairs", __FUNCTION__);
return;
}
for (auto& candPair : candPairs) {
NS_ConvertASCIItoUTF16 codeword(candPair.codeword.c_str());
NS_ConvertASCIItoUTF16 localCodeword(candPair.local.codeword.c_str());
NS_ConvertASCIItoUTF16 remoteCodeword(candPair.remote.codeword.c_str());
// Only expose candidate-pair statistics to chrome, until we've thought
@@ -3998,17 +4003,17 @@ void PeerConnectionImpl::DeliverStatsRep
pco->OnGetStatsSuccess(*query->report, rv);
} else {
pco->OnGetStatsError(kInternalError,
ObString("Failed to fetch statistics"),
rv);
}
if (rv.Failed()) {
- CSFLogError(logTag, "Error firing stats observer callback");
+ CSFLogError(LOGTAG, "Error firing stats observer callback");
}
}
}
}
void
PeerConnectionImpl::RecordLongtermICEStatistics() {
WebrtcGlobalInformation::StoreLongTermICEStatistics(*this);
@@ -4063,17 +4068,17 @@ PeerConnectionImpl::MaybeFireNegotiation
}
void
PeerConnectionImpl::IceStreamReady(NrIceMediaStream *aStream)
{
PC_AUTO_ENTER_API_CALL_NO_CHECK();
MOZ_ASSERT(aStream);
- CSFLogDebug(logTag, "%s: %s", __FUNCTION__, aStream->name().c_str());
+ CSFLogDebug(LOGTAG, "%s: %s", __FUNCTION__, aStream->name().c_str());
}
//Telemetry for when calls start
void
PeerConnectionImpl::startCallTelem() {
if (!mStartTime.IsNull()) {
return;
}
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
@@ -20,16 +20,21 @@
#include "sigslot.h"
#include "nricectx.h"
#include "nricemediastream.h"
#include "nsComponentManagerUtils.h"
#include "nsPIDOMWindow.h"
#include "nsIUUIDGenerator.h"
#include "nsIThread.h"
+// Work around nasty macro in webrtc/voice_engine/voice_engine_defines.h
+#ifdef GetLastError
+#undef GetLastError
+#endif
+
#include "signaling/src/jsep/JsepSession.h"
#include "signaling/src/jsep/JsepSessionImpl.h"
#include "signaling/src/sdp/SdpMediaSection.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/PeerConnectionImplEnumsBinding.h"
#include "PrincipalChangeObserver.h"
#include "StreamTracks.h"
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
@@ -51,17 +51,21 @@
#include "MediaStreamError.h"
#include "MediaManager.h"
namespace mozilla {
using namespace dom;
-static const char* logTag = "PeerConnectionMedia";
+static const char* pcmLogTag = "PeerConnectionMedia";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG pcmLogTag
//XXX(pkerr) What about bitrate settings? Going with the defaults for now.
RefPtr<WebRtcCallWrapper>
CreateCall()
{
return WebRtcCallWrapper::Create();
}
@@ -70,28 +74,28 @@ PeerConnectionMedia::ReplaceTrack(const
const std::string& aOldTrackId,
MediaStreamTrack& aNewTrack,
const std::string& aNewStreamId,
const std::string& aNewTrackId)
{
RefPtr<LocalSourceStreamInfo> oldInfo(GetLocalStreamById(aOldStreamId));
if (!oldInfo) {
- CSFLogError(logTag, "Failed to find stream id %s", aOldStreamId.c_str());
+ CSFLogError(LOGTAG, "Failed to find stream id %s", aOldStreamId.c_str());
return NS_ERROR_NOT_AVAILABLE;
}
nsresult rv = AddTrack(*aNewTrack.mOwningStream, aNewStreamId,
aNewTrack, aNewTrackId);
NS_ENSURE_SUCCESS(rv, rv);
RefPtr<LocalSourceStreamInfo> newInfo(GetLocalStreamById(aNewStreamId));
if (!newInfo) {
- CSFLogError(logTag, "Failed to add track id %s", aNewTrackId.c_str());
+ CSFLogError(LOGTAG, "Failed to add track id %s", aNewTrackId.c_str());
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
rv = newInfo->TakePipelineFrom(oldInfo, aOldTrackId, aNewTrack, aNewTrackId);
NS_ENSURE_SUCCESS(rv, rv);
return RemoveLocalTrack(aOldStreamId, aOldTrackId);
@@ -176,99 +180,99 @@ void SourceStreamInfo::DetachMedia_m()
mMediaStream = nullptr;
}
already_AddRefed<PeerConnectionImpl>
PeerConnectionImpl::Constructor(const dom::GlobalObject& aGlobal, ErrorResult& rv)
{
RefPtr<PeerConnectionImpl> pc = new PeerConnectionImpl(&aGlobal);
- CSFLogDebug(logTag, "Created PeerConnection: %p", pc.get());
+ CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc.get());
return pc.forget();
}
PeerConnectionImpl* PeerConnectionImpl::CreatePeerConnection()
{
PeerConnectionImpl *pc = new PeerConnectionImpl();
- CSFLogDebug(logTag, "Created PeerConnection: %p", pc);
+ CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc);
return pc;
}
NS_IMETHODIMP PeerConnectionMedia::ProtocolProxyQueryHandler::
OnProxyAvailable(nsICancelable *request,
nsIChannel *aChannel,
nsIProxyInfo *proxyinfo,
nsresult result) {
if (!pcm_->mProxyRequest) {
// PeerConnectionMedia is no longer waiting
return NS_OK;
}
- CSFLogInfo(logTag, "%s: Proxy Available: %d", __FUNCTION__, (int)result);
+ CSFLogInfo(LOGTAG, "%s: Proxy Available: %d", __FUNCTION__, (int)result);
if (NS_SUCCEEDED(result) && proxyinfo) {
SetProxyOnPcm(*proxyinfo);
}
pcm_->mProxyResolveCompleted = true;
pcm_->mProxyRequest = nullptr;
pcm_->FlushIceCtxOperationQueueIfReady();
return NS_OK;
}
void
PeerConnectionMedia::ProtocolProxyQueryHandler::SetProxyOnPcm(
nsIProxyInfo& proxyinfo)
{
- CSFLogInfo(logTag, "%s: Had proxyinfo", __FUNCTION__);
+ CSFLogInfo(LOGTAG, "%s: Had proxyinfo", __FUNCTION__);
nsresult rv;
nsCString httpsProxyHost;
int32_t httpsProxyPort;
rv = proxyinfo.GetHost(httpsProxyHost);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Failed to get proxy server host", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Failed to get proxy server host", __FUNCTION__);
return;
}
rv = proxyinfo.GetPort(&httpsProxyPort);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Failed to get proxy server port", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Failed to get proxy server port", __FUNCTION__);
return;
}
if (pcm_->mIceCtxHdlr.get()) {
assert(httpsProxyPort >= 0 && httpsProxyPort < (1 << 16));
// Note that this could check if PrivacyRequested() is set on the PC and
// remove "webrtc" from the ALPN list. But that would only work if the PC
// was constructed with a peerIdentity constraint, not when isolated
// streams are added. If we ever need to signal to the proxy that the
// media is isolated, then we would need to restructure this code.
pcm_->mProxyServer.reset(
new NrIceProxyServer(httpsProxyHost.get(),
static_cast<uint16_t>(httpsProxyPort),
"webrtc,c-webrtc"));
} else {
- CSFLogError(logTag, "%s: Failed to set proxy server (ICE ctx unavailable)",
+ CSFLogError(LOGTAG, "%s: Failed to set proxy server (ICE ctx unavailable)",
__FUNCTION__);
}
}
NS_IMPL_ISUPPORTS(PeerConnectionMedia::ProtocolProxyQueryHandler, nsIProtocolProxyCallback)
void
PeerConnectionMedia::StunAddrsHandler::OnStunAddrsAvailable(
const mozilla::net::NrIceStunAddrArray& addrs)
{
- CSFLogInfo(logTag, "%s: receiving (%d) stun addrs", __FUNCTION__,
+ CSFLogInfo(LOGTAG, "%s: receiving (%d) stun addrs", __FUNCTION__,
(int)addrs.Length());
if (pcm_) {
pcm_->mStunAddrs = addrs;
pcm_->mLocalAddrsCompleted = true;
pcm_->mStunAddrsRequest = nullptr;
pcm_->FlushIceCtxOperationQueueIfReady();
pcm_ = nullptr;
}
@@ -287,17 +291,17 @@ PeerConnectionMedia::PeerConnectionMedia
mIceRestartState(ICE_RESTART_NONE),
mLocalAddrsCompleted(false) {
}
void
PeerConnectionMedia::InitLocalAddrs()
{
if (XRE_IsContentProcess()) {
- CSFLogDebug(logTag, "%s: Get stun addresses via IPC",
+ CSFLogDebug(LOGTAG, "%s: Get stun addresses via IPC",
mParentHandle.c_str());
nsCOMPtr<nsIEventTarget> target = mParent->GetWindow()
? mParent->GetWindow()->EventTargetFor(TaskCategory::Other)
: nullptr;
// We're in the content process, so send a request over IPC for the
// stun address discovery.
@@ -323,53 +327,53 @@ PeerConnectionMedia::InitProxy()
mProxyResolveCompleted = true;
return NS_OK;
}
nsresult rv;
nsCOMPtr<nsIProtocolProxyService> pps =
do_GetService(NS_PROTOCOLPROXYSERVICE_CONTRACTID, &rv);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Failed to get proxy service: %d", __FUNCTION__, (int)rv);
+ CSFLogError(LOGTAG, "%s: Failed to get proxy service: %d", __FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
// We use the following URL to find the "default" proxy address for all HTTPS
// connections. We will only attempt one HTTP(S) CONNECT per peer connection.
// "example.com" is guaranteed to be unallocated and should return the best default.
nsCOMPtr<nsIURI> fakeHttpsLocation;
rv = NS_NewURI(getter_AddRefs(fakeHttpsLocation), "https://example.com");
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Failed to set URI: %d", __FUNCTION__, (int)rv);
+ CSFLogError(LOGTAG, "%s: Failed to set URI: %d", __FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
nsCOMPtr<nsIChannel> channel;
rv = NS_NewChannel(getter_AddRefs(channel),
fakeHttpsLocation,
nsContentUtils::GetSystemPrincipal(),
nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL,
nsIContentPolicy::TYPE_OTHER);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Failed to get channel from URI: %d",
+ CSFLogError(LOGTAG, "%s: Failed to get channel from URI: %d",
__FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
nsCOMPtr<nsIEventTarget> target = mParent->GetWindow()
? mParent->GetWindow()->EventTargetFor(TaskCategory::Network)
: nullptr;
RefPtr<ProtocolProxyQueryHandler> handler = new ProtocolProxyQueryHandler(this);
rv = pps->AsyncResolve(channel,
nsIProtocolProxyService::RESOLVE_PREFER_HTTPS_PROXY |
nsIProtocolProxyService::RESOLVE_ALWAYS_TUNNEL,
handler, target, getter_AddRefs(mProxyRequest));
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: Failed to resolve protocol proxy: %d", __FUNCTION__, (int)rv);
+ CSFLogError(LOGTAG, "%s: Failed to resolve protocol proxy: %d", __FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
return NS_OK;
}
nsresult PeerConnectionMedia::Init(const std::vector<NrIceStunServer>& stun_servers,
const std::vector<NrIceTurnServer>& turn_servers,
@@ -386,41 +390,41 @@ nsresult PeerConnectionMedia::Init(const
// TODO(ekr@rtfm.com): need some way to set not offerer later
// Looks like a bug in the NrIceCtx API.
mIceCtxHdlr = NrIceCtxHandler::Create("PC:" + mParentName,
mParent->GetAllowIceLoopback(),
ice_tcp,
mParent->GetAllowIceLinkLocal(),
policy);
if(!mIceCtxHdlr) {
- CSFLogError(logTag, "%s: Failed to create Ice Context", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Failed to create Ice Context", __FUNCTION__);
return NS_ERROR_FAILURE;
}
if (NS_FAILED(rv = mIceCtxHdlr->ctx()->SetStunServers(stun_servers))) {
- CSFLogError(logTag, "%s: Failed to set stun servers", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Failed to set stun servers", __FUNCTION__);
return rv;
}
// Give us a way to globally turn off TURN support
bool disabled = Preferences::GetBool("media.peerconnection.turn.disable", false);
if (!disabled) {
if (NS_FAILED(rv = mIceCtxHdlr->ctx()->SetTurnServers(turn_servers))) {
- CSFLogError(logTag, "%s: Failed to set turn servers", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Failed to set turn servers", __FUNCTION__);
return rv;
}
} else if (!turn_servers.empty()) {
- CSFLogError(logTag, "%s: Setting turn servers disabled", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Setting turn servers disabled", __FUNCTION__);
}
if (NS_FAILED(rv = mDNSResolver->Init())) {
- CSFLogError(logTag, "%s: Failed to initialize dns resolver", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Failed to initialize dns resolver", __FUNCTION__);
return rv;
}
if (NS_FAILED(rv =
mIceCtxHdlr->ctx()->SetResolver(mDNSResolver->AllocateResolver()))) {
- CSFLogError(logTag, "%s: Failed to get dns resolver", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Failed to get dns resolver", __FUNCTION__);
return rv;
}
ConnectSignals(mIceCtxHdlr->ctx().get());
// This webrtc:Call instance will be shared by audio and video media conduits.
mCall = CreateCall();
return NS_OK;
@@ -444,29 +448,29 @@ PeerConnectionMedia::EnsureTransports(co
GatherIfReady();
}
void
PeerConnectionMedia::EnsureTransport_s(size_t aLevel, size_t aComponentCount)
{
RefPtr<NrIceMediaStream> stream(mIceCtxHdlr->ctx()->GetStream(aLevel));
if (!stream) {
- CSFLogDebug(logTag, "%s: Creating ICE media stream=%u components=%u",
+ CSFLogDebug(LOGTAG, "%s: Creating ICE media stream=%u components=%u",
mParentHandle.c_str(),
static_cast<unsigned>(aLevel),
static_cast<unsigned>(aComponentCount));
std::ostringstream os;
os << mParentName << " aLevel=" << aLevel;
RefPtr<NrIceMediaStream> stream =
mIceCtxHdlr->CreateStream(os.str(),
aComponentCount);
if (!stream) {
- CSFLogError(logTag, "Failed to create ICE stream.");
+ CSFLogError(LOGTAG, "Failed to create ICE stream.");
return;
}
stream->SetLevel(aLevel);
stream->SignalReady.connect(this, &PeerConnectionMedia::IceStreamReady_s);
stream->SignalCandidate.connect(this,
&PeerConnectionMedia::OnCandidateFound_s);
mIceCtxHdlr->ctx()->SetStream(aLevel, stream);
@@ -482,22 +486,22 @@ PeerConnectionMedia::ActivateOrRemoveTra
RefPtr<JsepTransport> transport = transports[i];
std::string ufrag;
std::string pwd;
std::vector<std::string> candidates;
if (transport->mComponents) {
MOZ_ASSERT(transport->mIce);
- CSFLogDebug(logTag, "Transport %u is active", static_cast<unsigned>(i));
+ CSFLogDebug(LOGTAG, "Transport %u is active", static_cast<unsigned>(i));
ufrag = transport->mIce->GetUfrag();
pwd = transport->mIce->GetPassword();
candidates = transport->mIce->GetCandidates();
} else {
- CSFLogDebug(logTag, "Transport %u is disabled", static_cast<unsigned>(i));
+ CSFLogDebug(LOGTAG, "Transport %u is disabled", static_cast<unsigned>(i));
// Make sure the MediaPipelineFactory doesn't try to use these.
RemoveTransportFlow(i, false);
RemoveTransportFlow(i, true);
}
if (forceIceTcp) {
candidates.erase(std::remove_if(candidates.begin(),
candidates.end(),
@@ -532,46 +536,46 @@ void
PeerConnectionMedia::ActivateOrRemoveTransport_s(
size_t aMLine,
size_t aComponentCount,
const std::string& aUfrag,
const std::string& aPassword,
const std::vector<std::string>& aCandidateList) {
if (!aComponentCount) {
- CSFLogDebug(logTag, "%s: Removing ICE media stream=%u",
+ CSFLogDebug(LOGTAG, "%s: Removing ICE media stream=%u",
mParentHandle.c_str(),
static_cast<unsigned>(aMLine));
mIceCtxHdlr->ctx()->SetStream(aMLine, nullptr);
return;
}
RefPtr<NrIceMediaStream> stream(mIceCtxHdlr->ctx()->GetStream(aMLine));
if (!stream) {
MOZ_ASSERT(false);
return;
}
if (!stream->HasParsedAttributes()) {
- CSFLogDebug(logTag, "%s: Activating ICE media stream=%u components=%u",
+ CSFLogDebug(LOGTAG, "%s: Activating ICE media stream=%u components=%u",
mParentHandle.c_str(),
static_cast<unsigned>(aMLine),
static_cast<unsigned>(aComponentCount));
std::vector<std::string> attrs;
attrs.reserve(aCandidateList.size() + 2 /* ufrag + pwd */);
for (const auto& candidate : aCandidateList) {
attrs.push_back("candidate:" + candidate);
}
attrs.push_back("ice-ufrag:" + aUfrag);
attrs.push_back("ice-pwd:" + aPassword);
nsresult rv = stream->ParseAttributes(attrs);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "Couldn't parse ICE attributes, rv=%u",
+ CSFLogError(LOGTAG, "Couldn't parse ICE attributes, rv=%u",
static_cast<unsigned>(rv));
}
for (size_t c = aComponentCount; c < stream->components(); ++c) {
// components are 1-indexed
stream->DisableComponent(c + 1);
}
}
@@ -633,33 +637,33 @@ PeerConnectionMedia::StartIceChecks(cons
void
PeerConnectionMedia::StartIceChecks_s(
bool aIsControlling,
bool aIsOfferer,
bool aIsIceLite,
const std::vector<std::string>& aIceOptionsList) {
- CSFLogDebug(logTag, "Starting ICE Checking");
+ CSFLogDebug(LOGTAG, "Starting ICE Checking");
std::vector<std::string> attributes;
if (aIsIceLite) {
attributes.push_back("ice-lite");
}
if (!aIceOptionsList.empty()) {
attributes.push_back("ice-options:");
for (const auto& option : aIceOptionsList) {
attributes.back() += option + ' ';
}
}
nsresult rv = mIceCtxHdlr->ctx()->ParseGlobalAttributes(attributes);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "%s: couldn't parse global parameters", __FUNCTION__ );
+ CSFLogError(LOGTAG, "%s: couldn't parse global parameters", __FUNCTION__ );
}
mIceCtxHdlr->ctx()->SetControlling(aIsControlling ?
NrIceCtx::ICE_CONTROLLING :
NrIceCtx::ICE_CONTROLLED);
mIceCtxHdlr->ctx()->StartChecks(aIsOfferer);
}
@@ -870,24 +874,24 @@ PeerConnectionMedia::AddIceCandidate(con
}
void
PeerConnectionMedia::AddIceCandidate_s(const std::string& aCandidate,
const std::string& aMid,
uint32_t aMLine) {
RefPtr<NrIceMediaStream> stream(mIceCtxHdlr->ctx()->GetStream(aMLine));
if (!stream) {
- CSFLogError(logTag, "No ICE stream for candidate at level %u: %s",
+ CSFLogError(LOGTAG, "No ICE stream for candidate at level %u: %s",
static_cast<unsigned>(aMLine), aCandidate.c_str());
return;
}
nsresult rv = stream->ParseTrickleCandidate(aCandidate);
if (NS_FAILED(rv)) {
- CSFLogError(logTag, "Couldn't process ICE candidate at level %u",
+ CSFLogError(LOGTAG, "Couldn't process ICE candidate at level %u",
static_cast<unsigned>(aMLine));
return;
}
}
void
PeerConnectionMedia::UpdateNetworkState(bool online) {
RUN_ON_THREAD(GetSTSThread(),
@@ -980,17 +984,17 @@ PeerConnectionMedia::EnsureIceGathering_
nsresult
PeerConnectionMedia::AddTrack(DOMMediaStream& aMediaStream,
const std::string& streamId,
MediaStreamTrack& aTrack,
const std::string& trackId)
{
ASSERT_ON_THREAD(mMainThread);
- CSFLogDebug(logTag, "%s: MediaStream: %p", __FUNCTION__, &aMediaStream);
+ CSFLogDebug(LOGTAG, "%s: MediaStream: %p", __FUNCTION__, &aMediaStream);
RefPtr<LocalSourceStreamInfo> localSourceStream =
GetLocalStreamById(streamId);
if (!localSourceStream) {
localSourceStream = new LocalSourceStreamInfo(&aMediaStream, this, streamId);
mLocalSourceStreams.AppendElement(localSourceStream);
}
@@ -1000,17 +1004,17 @@ PeerConnectionMedia::AddTrack(DOMMediaSt
}
nsresult
PeerConnectionMedia::RemoveLocalTrack(const std::string& streamId,
const std::string& trackId)
{
ASSERT_ON_THREAD(mMainThread);
- CSFLogDebug(logTag, "%s: stream: %s track: %s", __FUNCTION__,
+ CSFLogDebug(LOGTAG, "%s: stream: %s track: %s", __FUNCTION__,
streamId.c_str(), trackId.c_str());
RefPtr<LocalSourceStreamInfo> localSourceStream =
GetLocalStreamById(streamId);
if (!localSourceStream) {
return NS_ERROR_ILLEGAL_VALUE;
}
@@ -1022,17 +1026,17 @@ PeerConnectionMedia::RemoveLocalTrack(co
}
nsresult
PeerConnectionMedia::RemoveRemoteTrack(const std::string& streamId,
const std::string& trackId)
{
ASSERT_ON_THREAD(mMainThread);
- CSFLogDebug(logTag, "%s: stream: %s track: %s", __FUNCTION__,
+ CSFLogDebug(LOGTAG, "%s: stream: %s track: %s", __FUNCTION__,
streamId.c_str(), trackId.c_str());
RefPtr<RemoteSourceStreamInfo> remoteSourceStream =
GetRemoteStreamById(streamId);
if (!remoteSourceStream) {
return NS_ERROR_ILLEGAL_VALUE;
}
@@ -1043,17 +1047,17 @@ PeerConnectionMedia::RemoveRemoteTrack(c
return NS_OK;
}
void
PeerConnectionMedia::SelfDestruct()
{
ASSERT_ON_THREAD(mMainThread);
- CSFLogDebug(logTag, "%s: ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
// Shut down the media
for (uint32_t i=0; i < mLocalSourceStreams.Length(); ++i) {
mLocalSourceStreams[i]->DetachMedia_m();
}
for (uint32_t i=0; i < mRemoteSourceStreams.Length(); ++i) {
mRemoteSourceStreams[i]->DetachMedia_m();
@@ -1069,23 +1073,23 @@ PeerConnectionMedia::SelfDestruct()
mProxyRequest = nullptr;
}
// Shutdown the transport (async)
RUN_ON_THREAD(mSTSThread, WrapRunnable(
this, &PeerConnectionMedia::ShutdownMediaTransport_s),
NS_DISPATCH_NORMAL);
- CSFLogDebug(logTag, "%s: Media shut down", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s: Media shut down", __FUNCTION__);
}
void
PeerConnectionMedia::SelfDestruct_m()
{
- CSFLogDebug(logTag, "%s: ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
ASSERT_ON_THREAD(mMainThread);
mLocalSourceStreams.Clear();
mRemoteSourceStreams.Clear();
mMainThread = nullptr;
@@ -1093,17 +1097,17 @@ PeerConnectionMedia::SelfDestruct_m()
this->Release();
}
void
PeerConnectionMedia::ShutdownMediaTransport_s()
{
ASSERT_ON_THREAD(mSTSThread);
- CSFLogDebug(logTag, "%s: ", __FUNCTION__);
+ CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
// Here we access m{Local|Remote}SourceStreams off the main thread.
// That's OK because by here PeerConnectionImpl has forgotten about us,
// so there is no chance of getting a call in here from outside.
// The dispatches from SelfDestruct() and to SelfDestruct_m() provide
// memory barriers that protect us from badness.
for (uint32_t i=0; i < mLocalSourceStreams.Length(); ++i) {
mLocalSourceStreams[i]->DetachTransport_s();
@@ -1114,17 +1118,17 @@ PeerConnectionMedia::ShutdownMediaTransp
}
disconnect_all();
mTransportFlows.clear();
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
NrIceStats stats = mIceCtxHdlr->Destroy();
- CSFLogDebug(logTag, "Ice Telemetry: stun (retransmits: %d)"
+ CSFLogDebug(LOGTAG, "Ice Telemetry: stun (retransmits: %d)"
" turn (401s: %d 403s: %d 438s: %d)",
stats.stun_retransmits, stats.turn_401s, stats.turn_403s,
stats.turn_438s);
Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_NICER_STUN_RETRANSMITS,
stats.stun_retransmits);
Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_NICER_TURN_401S,
stats.turn_401s);
@@ -1281,17 +1285,17 @@ PeerConnectionMedia::IceConnectionStateC
void
PeerConnectionMedia::OnCandidateFound_s(NrIceMediaStream *aStream,
const std::string &aCandidateLine)
{
ASSERT_ON_THREAD(mSTSThread);
MOZ_ASSERT(aStream);
MOZ_RELEASE_ASSERT(mIceCtxHdlr);
- CSFLogDebug(logTag, "%s: %s", __FUNCTION__, aStream->name().c_str());
+ CSFLogDebug(LOGTAG, "%s: %s", __FUNCTION__, aStream->name().c_str());
NrIceCandidate candidate;
NrIceCandidate rtcpCandidate;
GetDefaultCandidates(*aStream, &candidate, &rtcpCandidate);
// ShutdownMediaTransport_s has not run yet because it unhooks this function
// from its signal, which means that SelfDestruct_m has not been dispatched
// yet either, so this PCMedia will still be around when this dispatch reaches
@@ -1335,17 +1339,17 @@ PeerConnectionMedia::GetDefaultCandidate
// Optional; component won't exist if doing rtcp-mux
if (NS_FAILED(aStream.GetDefaultCandidate(2, aRtcpCandidate))) {
aRtcpCandidate->cand_addr.host.clear();
aRtcpCandidate->cand_addr.port = 0;
}
if (NS_FAILED(res)) {
aCandidate->cand_addr.host.clear();
aCandidate->cand_addr.port = 0;
- CSFLogError(logTag, "%s: GetDefaultCandidates failed for level %u, "
+ CSFLogError(LOGTAG, "%s: GetDefaultCandidates failed for level %u, "
"res=%u",
__FUNCTION__,
static_cast<unsigned>(aStream.GetLevel()),
static_cast<unsigned>(res));
}
}
void
@@ -1364,17 +1368,17 @@ PeerConnectionMedia::IceConnectionStateC
SignalIceConnectionStateChange(ctx, state);
}
void
PeerConnectionMedia::IceStreamReady_s(NrIceMediaStream *aStream)
{
MOZ_ASSERT(aStream);
- CSFLogDebug(logTag, "%s: %s", __FUNCTION__, aStream->name().c_str());
+ CSFLogDebug(LOGTAG, "%s: %s", __FUNCTION__, aStream->name().c_str());
}
void
PeerConnectionMedia::OnCandidateFound_m(const std::string& aCandidateLine,
const std::string& aDefaultAddr,
uint16_t aDefaultPort,
const std::string& aDefaultRtcpAddr,
uint16_t aDefaultRtcpPort,
@@ -1468,27 +1472,27 @@ PeerConnectionMedia::ConnectDtlsListener
nsresult
LocalSourceStreamInfo::TakePipelineFrom(RefPtr<LocalSourceStreamInfo>& info,
const std::string& oldTrackId,
MediaStreamTrack& aNewTrack,
const std::string& newTrackId)
{
if (mPipelines.count(newTrackId)) {
- CSFLogError(logTag, "%s: Pipeline already exists for %s/%s",
+ CSFLogError(LOGTAG, "%s: Pipeline already exists for %s/%s",
__FUNCTION__, mId.c_str(), newTrackId.c_str());
return NS_ERROR_INVALID_ARG;
}
RefPtr<MediaPipeline> pipeline(info->ForgetPipelineByTrackId_m(oldTrackId));
if (!pipeline) {
// Replacetrack can potentially happen in the middle of offer/answer, before
// the pipeline has been created.
- CSFLogInfo(logTag, "%s: Replacing track before the pipeline has been "
+ CSFLogInfo(LOGTAG, "%s: Replacing track before the pipeline has been "
"created, nothing to do.", __FUNCTION__);
return NS_OK;
}
nsresult rv =
static_cast<MediaPipelineTransmit*>(pipeline.get())->ReplaceTrack(aNewTrack);
NS_ENSURE_SUCCESS(rv, rv);
@@ -1606,17 +1610,17 @@ SourceStreamInfo::AnyCodecHasPluginID(ui
nsresult
SourceStreamInfo::StorePipeline(
const std::string& trackId,
const RefPtr<mozilla::MediaPipeline>& aPipeline)
{
MOZ_ASSERT(mPipelines.find(trackId) == mPipelines.end());
if (mPipelines.find(trackId) != mPipelines.end()) {
- CSFLogError(logTag, "%s: Storing duplicate track", __FUNCTION__);
+ CSFLogError(LOGTAG, "%s: Storing duplicate track", __FUNCTION__);
return NS_ERROR_FAILURE;
}
mPipelines[trackId] = aPipeline;
return NS_OK;
}
void
@@ -1655,17 +1659,17 @@ RemoteSourceStreamInfo::SyncPipeline(
static_cast<WebrtcAudioConduit*>(aPipeline->IsVideo() ?
i->second->Conduit() :
aPipeline->Conduit());
WebrtcVideoConduit *video_conduit =
static_cast<WebrtcVideoConduit*>(aPipeline->IsVideo() ?
aPipeline->Conduit() :
i->second->Conduit());
video_conduit->SyncTo(audio_conduit);
- CSFLogDebug(logTag, "Syncing %p to %p, %s to %s",
+ CSFLogDebug(LOGTAG, "Syncing %p to %p, %s to %s",
video_conduit, audio_conduit,
i->first.c_str(), aPipeline->trackid().c_str());
}
}
}
void
RemoteSourceStreamInfo::StartReceiving()
@@ -1679,17 +1683,17 @@ RemoteSourceStreamInfo::StartReceiving()
SourceMediaStream* source = GetMediaStream()->GetInputStream()->AsSourceStream();
source->SetPullEnabled(true);
// AdvanceKnownTracksTicksTime(HEAT_DEATH_OF_UNIVERSE) means that in
// theory per the API, we can't add more tracks before that
// time. However, the impl actually allows it, and it avoids a whole
// bunch of locking that would be required (and potential blocking)
// if we used smaller values and updated them on each NotifyPull.
source->AdvanceKnownTracksTime(STREAM_TIME_MAX);
- CSFLogDebug(logTag, "Finished adding tracks to MediaStream %p", source);
+ CSFLogDebug(LOGTAG, "Finished adding tracks to MediaStream %p", source);
}
RefPtr<MediaPipeline> SourceStreamInfo::GetPipelineByTrackId_m(
const std::string& trackId) {
ASSERT_ON_THREAD(mParent->GetMainThread());
// Refuse to hand out references if we're tearing down.
// (Since teardown involves a dispatch to and from STS before MediaPipelines
--- a/media/webrtc/signaling/src/peerconnection/WebrtcGlobalInformation.cpp
+++ b/media/webrtc/signaling/src/peerconnection/WebrtcGlobalInformation.cpp
@@ -31,17 +31,21 @@
#include "mozilla/RefPtr.h"
#include "rlogconnector.h"
#include "runnable_utils.h"
#include "PeerConnectionCtx.h"
#include "PeerConnectionImpl.h"
#include "webrtc/system_wrappers/include/trace.h"
-static const char* logTag = "WebrtcGlobalInformation";
+static const char* wgiLogTag = "WebrtcGlobalInformation";
+#ifdef LOGTAG
+#undef LOGTAG
+#endif
+#define LOGTAG wgiLogTag
namespace mozilla {
namespace dom {
typedef Vector<nsAutoPtr<RTCStatsQuery>> RTCStatsQueries;
typedef nsTArray<RTCStatsReportInternal> Stats;
template<class Request, typename Callback,
@@ -101,17 +105,17 @@ public:
}
void Complete()
{
ErrorResult rv;
mCallback.get()->Call(mResult, rv);
if (rv.Failed()) {
- CSFLogError(logTag, "Error firing stats observer callback");
+ CSFLogError(LOGTAG, "Error firing stats observer callback");
}
}
protected:
// The mutex is used to protect two related operations involving the sRequest map
// and the sLastRequestId. For the map, it prevents more than one thread from
// adding or deleting map entries at the same time. For id generation,
// it creates an atomic allocation and increment.
@@ -261,17 +265,17 @@ OnStatsReport_m(WebrtcGlobalChild* aThis
}
// This is the last stats report to be collected. (Must be the gecko process).
MOZ_ASSERT(XRE_IsParentProcess());
StatsRequest* request = StatsRequest::Get(aRequestId);
if (!request) {
- CSFLogError(logTag, "Bad RequestId");
+ CSFLogError(LOGTAG, "Bad RequestId");
return;
}
for (auto&& query : *aQueryList) {
request->mResult.mReports.Value().AppendElement(*(query->report), fallible);
}
// Reports saved for closed/destroyed PeerConnections
@@ -332,17 +336,17 @@ static void OnGetLogging_m(WebrtcGlobalC
}
// This is the last log to be collected. (Must be the gecko process).
MOZ_ASSERT(XRE_IsParentProcess());
LogRequest* request = LogRequest::Get(aRequestId);
if (!request) {
- CSFLogError(logTag, "Bad RequestId");
+ CSFLogError(LOGTAG, "Bad RequestId");
return;
}
if (!aLogList->empty()) {
for (auto& line : *aLogList) {
request->mResult.AppendElement(NS_ConvertUTF8toUTF16(line.c_str()),
fallible);
}
@@ -711,17 +715,17 @@ WebrtcGlobalParent::RecvGetStatsResult(c
nsTArray<RTCStatsReportInternal>&& Stats)
{
MOZ_ASSERT(NS_IsMainThread());
nsresult rv = NS_OK;
StatsRequest* request = StatsRequest::Get(aRequestId);
if (!request) {
- CSFLogError(logTag, "Bad RequestId");
+ CSFLogError(LOGTAG, "Bad RequestId");
return IPC_FAIL_NO_REASON(this);
}
for (auto&& s : Stats) {
request->mResult.mReports.Value().AppendElement(s, fallible);
}
auto next = request->GetNextParent();
@@ -755,17 +759,17 @@ mozilla::ipc::IPCResult
WebrtcGlobalParent::RecvGetLogResult(const int& aRequestId,
const WebrtcGlobalLog& aLog)
{
MOZ_ASSERT(NS_IsMainThread());
LogRequest* request = LogRequest::Get(aRequestId);
if (!request) {
- CSFLogError(logTag, "Bad RequestId");
+ CSFLogError(LOGTAG, "Bad RequestId");
return IPC_FAIL_NO_REASON(this);
}
request->mResult.AppendElements(aLog, fallible);
auto next = request->GetNextParent();
if (next) {
// There are more content instances to query.
if (!next->SendGetLogRequest(request->mRequestId, request->mPattern)) {
@@ -774,17 +778,17 @@ WebrtcGlobalParent::RecvGetLogResult(con
return IPC_OK();
}
// Content queries complete, run chrome instance query if applicable
nsresult rv = RunLogQuery(request->mPattern, nullptr, aRequestId);
if (NS_FAILED(rv)) {
//Unable to get gecko process log. Return what has been collected.
- CSFLogError(logTag, "Unable to extract chrome process log");
+ CSFLogError(LOGTAG, "Unable to extract chrome process log");
request->Complete();
LogRequest::Delete(aRequestId);
}
return IPC_OK();
}
WebrtcGlobalParent*
--- a/media/webrtc/signaling/src/peerconnection/moz.build
+++ b/media/webrtc/signaling/src/peerconnection/moz.build
@@ -15,18 +15,17 @@ LOCAL_INCLUDES += [
'/media/webrtc/signaling/src/common',
'/media/webrtc/signaling/src/common/browser_logging',
'/media/webrtc/signaling/src/common/time_profiling',
'/media/webrtc/signaling/src/media-conduit',
'/media/webrtc/signaling/src/mediapipeline',
'/media/webrtc/trunk',
]
-# Multiple uses of logTag
-SOURCES += [
+UNIFIED_SOURCES += [
'MediaPipelineFactory.cpp',
'MediaStreamList.cpp',
'PeerConnectionCtx.cpp',
'PeerConnectionImpl.cpp',
'PeerConnectionMedia.cpp',
'WebrtcGlobalInformation.cpp',
]