Bug 1404997 - P11. More constness fixes. r?pehrsons
MozReview-Commit-ID: 5qgO43V9R9P
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -330,28 +330,26 @@ protected:
0);
}
return;
}
ImageFormat format = aImage->GetFormat();
if (format == ImageFormat::PLANAR_YCBCR) {
- // Cast away constness b/c some of the accessors are non-const
+ // PlanarYCbCrImage::GetData() is const, so we can use aImage as-is.
- PlanarYCbCrImage* yuv = const_cast<PlanarYCbCrImage*>(
- static_cast<const PlanarYCbCrImage*>(aImage));
-
- const PlanarYCbCrData* data = yuv->GetData();
+ const PlanarYCbCrData* data =
+ static_cast<const PlanarYCbCrImage*>(aImage)->GetData();
if (data) {
uint8_t* y = data->mYChannel;
uint8_t* cb = data->mCbChannel;
uint8_t* cr = data->mCrChannel;
int32_t yStride = data->mYStride;
int32_t cbCrStride = data->mCbCrStride;
- uint32_t width = yuv->GetSize().width;
- uint32_t height = yuv->GetSize().height;
+ uint32_t width = aImage->GetSize().width;
+ uint32_t height = aImage->GetSize().height;
rtc::Callback0<void> callback_unused;
rtc::scoped_refptr<webrtc::WrappedI420Buffer> video_frame_buffer(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
width,
height,
y,
yStride,
@@ -589,17 +587,17 @@ protected:
// reference We don't need to worry about runnables still trying to access
// the conduit, since the runnables hold a ref to AudioProxyThread.
NS_ReleaseOnMainThreadSystemGroup("AudioProxyThread::mConduit",
mConduit.forget());
MOZ_COUNT_DTOR(AudioProxyThread);
}
RefPtr<AudioSessionConduit> mConduit;
- RefPtr<AutoTaskQueue> mTaskQueue;
+ const RefPtr<AutoTaskQueue> mTaskQueue;
// Only accessed on mTaskQueue
UniquePtr<AudioPacketizer<int16_t, int16_t>> mPacketizer;
// A buffer to hold a single packet of audio.
int16_t mPacket[AUDIO_SAMPLE_BUFFER_MAX_BYTES / sizeof(int16_t)];
};
static char kDTLSExporterLabel[] = "EXTRACTOR-dtls_srtp";
@@ -620,17 +618,16 @@ MediaPipeline::MediaPipeline(const std::
// for safety
, mRtpPacketsSent(0)
, mRtcpPacketsSent(0)
, mRtpPacketsReceived(0)
, mRtcpPacketsReceived(0)
, mRtpBytesSent(0)
, mRtpBytesReceived(0)
, mPc(aPc)
- , mDescription()
, mRtpParser(webrtc::RtpHeaderParser::Create())
, mPacketDumper(new PacketDumper(mPc))
{
if (mDirection == DirectionType::RECEIVE) {
mConduit->SetReceiverTransport(mTransport);
} else {
mConduit->SetTransmitterTransport(mTransport);
}
@@ -984,17 +981,17 @@ MediaPipeline::UpdateRtcpMuxState(Transp
mRtcp.mSendSrtp = aInfo.mSendSrtp;
mRtcp.mRecvSrtp = aInfo.mRecvSrtp;
}
}
}
}
nsresult
-MediaPipeline::SendPacket(TransportFlow* aFlow, const void* aData, int aLen)
+MediaPipeline::SendPacket(const TransportFlow* aFlow, const void* aData, int aLen)
{
ASSERT_ON_THREAD(mStsThread);
// Note that we bypass the DTLS layer here
TransportLayerDtls* dtls =
static_cast<TransportLayerDtls*>(aFlow->GetLayer(TransportLayerDtls::ID()));
MOZ_ASSERT(dtls);
@@ -1268,17 +1265,17 @@ MediaPipeline::RtcpPacketReceived(Transp
MOZ_ASSERT(mRtcp.mRecvSrtp); // This should never happen
(void)mConduit->ReceivedRTCPPacket(innerData.get(),
outLen); // Ignore error codes
}
bool
-MediaPipeline::IsRtp(const unsigned char* aData, size_t aLen)
+MediaPipeline::IsRtp(const unsigned char* aData, size_t aLen) const
{
if (aLen < 2)
return false;
// Check if this is a RTCP packet. Logic based on the types listed in
// media/webrtc/trunk/src/modules/rtp_rtcp/source/rtp_utility.cc
// Anything outside this range is RTP.
@@ -1518,35 +1515,35 @@ MediaPipelineTransmit::MediaPipelineTran
bool aIsVideo,
dom::MediaStreamTrack* aDomTrack,
RefPtr<MediaSessionConduit> aConduit)
: MediaPipeline(aPc,
DirectionType::TRANSMIT,
aMainThread,
aStsThread,
aConduit)
+ , mIsVideo(aIsVideo)
, mListener(new PipelineListener(aConduit))
- , mIsVideo(aIsVideo)
+ , mFeeder(aIsVideo ? MakeAndAddRef<VideoFrameFeeder>(mListener)
+ : nullptr) // For video we send frames to an
+ // async VideoFrameConverter that
+ // calls back to a VideoFrameFeeder
+ // that feeds I420 frames to
+ // VideoConduit.
, mDomTrack(aDomTrack)
, mTransmitting(false)
{
SetDescription();
if (!IsVideo()) {
mAudioProcessing = MakeAndAddRef<AudioProxyThread>(
static_cast<AudioSessionConduit*>(aConduit.get()));
mListener->SetAudioProxy(mAudioProcessing);
} else { // Video
- // For video we send frames to an async VideoFrameConverter that calls
- // back to a VideoFrameFeeder that feeds I420 frames to VideoConduit.
-
- mFeeder = MakeAndAddRef<VideoFrameFeeder>(mListener);
-
mConverter = MakeAndAddRef<VideoFrameConverter>();
mConverter->AddListener(mFeeder);
-
mListener->SetVideoFrameConverter(mConverter);
}
}
MediaPipelineTransmit::~MediaPipelineTransmit()
{
if (mFeeder) {
mFeeder->Detach();
@@ -1645,17 +1642,17 @@ MediaPipelineTransmit::Start()
bool
MediaPipelineTransmit::IsVideo() const
{
return mIsVideo;
}
void
-MediaPipelineTransmit::UpdateSinkIdentity_m(MediaStreamTrack* aTrack,
+MediaPipelineTransmit::UpdateSinkIdentity_m(const MediaStreamTrack* aTrack,
nsIPrincipal* aPrincipal,
const PeerIdentity* aSinkIdentity)
{
ASSERT_ON_THREAD(mMainThread);
if (aTrack != nullptr && aTrack != mDomTrack) {
// If a track is specified, then it might not be for this pipeline,
// since we receive notifications for all tracks on the PC.
@@ -2017,27 +2014,24 @@ MediaPipelineTransmit::PipelineListener:
}
// TODO(ekr@rtfm.com): For now assume that we have only one
// track type and it's destined for us
// See bug 784517
if (aMedia.GetType() == MediaSegment::AUDIO) {
MOZ_RELEASE_ASSERT(aRate > 0);
- AudioSegment* audio =
- const_cast<AudioSegment*>(static_cast<const AudioSegment*>(&aMedia));
- for (AudioSegment::ChunkIterator iter(*audio); !iter.IsEnded();
+ const AudioSegment* audio = static_cast<const AudioSegment*>(&aMedia);
+ for (AudioSegment::ConstChunkIterator iter(*audio); !iter.IsEnded();
iter.Next()) {
mAudioProcessing->QueueAudioChunk(aRate, *iter, mEnabled);
}
} else {
- VideoSegment* video =
- const_cast<VideoSegment*>(static_cast<const VideoSegment*>(&aMedia));
- VideoSegment::ChunkIterator iter(*video);
- for (VideoSegment::ChunkIterator iter(*video); !iter.IsEnded();
+ const VideoSegment* video = static_cast<const VideoSegment*>(&aMedia);
+ for (VideoSegment::ConstChunkIterator iter(*video); !iter.IsEnded();
iter.Next()) {
mConverter->QueueVideoChunk(*iter, !mEnabled);
}
}
}
void
MediaPipelineTransmit::PipelineListener::SetCurrentFrames(
@@ -2065,17 +2059,17 @@ public:
explicit GenericReceiveCallback(GenericReceiveListener* aListener)
: mListener(aListener)
{
}
void TrackAdded(TrackTicks aTime);
private:
- RefPtr<GenericReceiveListener> mListener;
+ const RefPtr<GenericReceiveListener> mListener;
};
class GenericReceiveListener : public MediaStreamListener
{
public:
explicit GenericReceiveListener(dom::MediaStreamTrack* aTrack)
: mTrack(aTrack)
, mPlayedTicks(0)
@@ -2165,17 +2159,17 @@ public:
{
}
void Run() override
{
mListener->SetPrincipalHandle_msg(mPrincipalHandle);
}
- RefPtr<GenericReceiveListener> mListener;
+ const RefPtr<GenericReceiveListener> mListener;
PrincipalHandle mPrincipalHandle;
};
mTrack->GraphImpl()->AppendMessage(
MakeUnique<Message>(this, aPrincipalHandle));
}
// Must be called on the MediaStreamGraph thread
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
@@ -150,17 +150,17 @@ public:
// given expiration time.
bool Expired(const DOMHighResTimeStamp aExpiry) const
{
return mTimestamp < aExpiry;
}
private:
static const double constexpr EXPIRY_TIME_MILLISECONDS = 10 * 1000;
- uint32_t mCsrc;
+ const uint32_t mCsrc;
DOMHighResTimeStamp mTimestamp;
};
// Gets the gathered contributing source stats for the last expiration period.
// @param aId the stream id to use for populating inboundRtpStreamId field
// @param aArr the array to append the stats objects to
void GetContributingSourceStats(
const nsString& aInboundStreamId,
@@ -198,17 +198,17 @@ public:
virtual nsresult SendRtpPacket(const uint8_t* aData, size_t aLen);
virtual nsresult SendRtcpPacket(const uint8_t* aData, size_t aLen);
private:
nsresult SendRtpRtcpPacket_s(nsAutoPtr<DataBuffer> aData, bool aIsRtp);
// Creates a cycle, which we break with Detach
RefPtr<MediaPipeline> mPipeline;
- nsCOMPtr<nsIEventTarget> mStsThread;
+ const nsCOMPtr<nsIEventTarget> mStsThread;
};
protected:
virtual ~MediaPipeline();
nsresult AttachTransport_s();
friend class PipelineTransport;
struct TransportInfo
@@ -245,43 +245,45 @@ protected:
TransportInfo* GetTransportInfo_s(TransportFlow* aFlow);
void IncrementRtpPacketsSent(int aBytes);
void IncrementRtcpPacketsSent();
void IncrementRtpPacketsReceived(int aBytes);
virtual void OnRtpPacketReceived() {};
void IncrementRtcpPacketsReceived();
- virtual nsresult SendPacket(TransportFlow* aFlow, const void* aData, int aLen);
+ virtual nsresult SendPacket(const TransportFlow* aFlow,
+ const void* aData,
+ int aLen);
// Process slots on transports
- void StateChange(TransportFlow* flow, TransportLayer::State);
+ void StateChange(TransportFlow* aFlow, TransportLayer::State);
void RtpPacketReceived(TransportLayer* aLayer,
const unsigned char* aData,
size_t aLen);
void RtcpPacketReceived(TransportLayer* aLayer,
const unsigned char* aData,
size_t aLen);
void PacketReceived(TransportLayer* aLayer,
const unsigned char* aData,
size_t aLen);
- DirectionType mDirection;
+ const DirectionType mDirection;
size_t mLevel;
RefPtr<MediaSessionConduit> mConduit; // Our conduit. Written on the main
// thread. Read on STS thread.
// The transport objects. Read/written on STS thread.
TransportInfo mRtp;
TransportInfo mRtcp;
// Pointers to the threads we need. Initialized at creation
// and used all over the place.
- nsCOMPtr<nsIEventTarget> mMainThread;
- nsCOMPtr<nsIEventTarget> mStsThread;
+ const nsCOMPtr<nsIEventTarget> mMainThread;
+ const nsCOMPtr<nsIEventTarget> mStsThread;
// Created in c'tor. Referenced by the conduit.
RefPtr<PipelineTransport> mTransport;
// Only safe to access from STS thread.
// Build into TransportInfo?
int32_t mRtpPacketsSent;
int32_t mRtcpPacketsSent;
@@ -289,30 +291,30 @@ protected:
int32_t mRtcpPacketsReceived;
int64_t mRtpBytesSent;
int64_t mRtpBytesReceived;
// Only safe to access from STS thread.
std::map<uint32_t, RtpCSRCStats> mCsrcStats;
// Written in c'tor. Read on STS thread.
- std::string mPc;
+ const std::string mPc;
std::string mDescription;
// Written in c'tor, all following accesses are on the STS thread.
nsAutoPtr<MediaPipelineFilter> mFilter;
- nsAutoPtr<webrtc::RtpHeaderParser> mRtpParser;
+ const nsAutoPtr<webrtc::RtpHeaderParser> mRtpParser;
nsAutoPtr<PacketDumper> mPacketDumper;
private:
// Gets the current time as a DOMHighResTimeStamp
static DOMHighResTimeStamp GetNow();
- bool IsRtp(const unsigned char* aData, size_t aLen);
+ bool IsRtp(const unsigned char* aData, size_t aLen) const;
// Must be called on the STS thread. Must be called after DetachMedia().
void DetachTransport_s();
};
class ConduitDeleteEvent : public Runnable
{
public:
explicit ConduitDeleteEvent(already_AddRefed<MediaSessionConduit> aConduit)
@@ -320,17 +322,17 @@ public:
, mConduit(aConduit)
{
}
/* we exist solely to proxy release of the conduit */
NS_IMETHOD Run() override { return NS_OK; }
private:
- RefPtr<MediaSessionConduit> mConduit;
+ const RefPtr<MediaSessionConduit> mConduit;
};
// A specialization of pipeline for reading from an input device
// and transmitting to the network.
class MediaPipelineTransmit : public MediaPipeline
{
public:
// Set aRtcpTransport to nullptr to use rtcp-mux
@@ -345,17 +347,17 @@ public:
void Stop() override;
// written and used from MainThread
bool IsVideo() const override;
// When the principal of the domtrack changes, it calls through to here
// so that we can determine whether to enable track transmission.
// `track` has to be null or equal `mDomTrack` for us to apply the update.
- virtual void UpdateSinkIdentity_m(dom::MediaStreamTrack* aTrack,
+ virtual void UpdateSinkIdentity_m(const dom::MediaStreamTrack* aTrack,
nsIPrincipal* aPrincipal,
const PeerIdentity* aSinkIdentity);
// Called on the main thread.
void DetachMedia() override;
// Override MediaPipeline::TransportReady.
nsresult TransportReady_s(TransportInfo& aInfo) override;
@@ -371,21 +373,21 @@ public:
class VideoFrameFeeder;
protected:
~MediaPipelineTransmit();
void SetDescription();
private:
- RefPtr<PipelineListener> mListener;
+ const bool mIsVideo;
+ const RefPtr<PipelineListener> mListener;
+ const RefPtr<VideoFrameFeeder> mFeeder;
RefPtr<AudioProxyThread> mAudioProcessing;
- RefPtr<VideoFrameFeeder> mFeeder;
RefPtr<VideoFrameConverter> mConverter;
- bool mIsVideo;
RefPtr<dom::MediaStreamTrack> mDomTrack;
bool mTransmitting;
};
// A specialization of pipeline for reading from the network and
// rendering media.
class MediaPipelineReceive : public MediaPipeline
{
@@ -459,14 +461,14 @@ public:
private:
class PipelineRenderer;
friend class PipelineRenderer;
// Separate class to allow ref counting
class PipelineListener;
- RefPtr<PipelineRenderer> mRenderer;
+ const RefPtr<PipelineRenderer> mRenderer;
RefPtr<PipelineListener> mListener;
};
} // namespace mozilla
#endif
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
@@ -1498,17 +1498,17 @@ PeerConnectionMedia::UpdateRemoteStreamP
ASSERT_ON_THREAD(mMainThread);
for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
transceiver->UpdatePrincipal(aPrincipal);
}
}
void
-PeerConnectionMedia::UpdateSinkIdentity_m(MediaStreamTrack* aTrack,
+PeerConnectionMedia::UpdateSinkIdentity_m(const MediaStreamTrack* aTrack,
nsIPrincipal* aPrincipal,
const PeerIdentity* aSinkIdentity)
{
ASSERT_ON_THREAD(mMainThread);
for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
transceiver->UpdateSinkIdentity(aTrack, aPrincipal, aSinkIdentity);
}
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
@@ -135,17 +135,17 @@ class PeerConnectionMedia : public sigsl
nsresult AddRIDFilter(dom::MediaStreamTrack& aRecvTrack,
const nsAString& aRid);
// In cases where the peer isn't yet identified, we disable the pipeline (not
// the stream, that would potentially affect others), so that it sends
// black/silence. Once the peer is identified, re-enable those streams.
// aTrack will be set if this update came from a principal change on aTrack.
// TODO: Move to PeerConnectionImpl
- void UpdateSinkIdentity_m(dom::MediaStreamTrack* aTrack,
+ void UpdateSinkIdentity_m(const dom::MediaStreamTrack* aTrack,
nsIPrincipal* aPrincipal,
const PeerIdentity* aSinkIdentity);
// this determines if any track is peerIdentity constrained
bool AnyLocalTrackHasPeerIdentity() const;
// When we finally learn who is on the other end, we need to change the ownership
// on streams
void UpdateRemoteStreamPrincipals_m(nsIPrincipal* aPrincipal);
--- a/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
@@ -115,17 +115,17 @@ TransceiverImpl::InitVideo()
mPCHandle,
mMainThread.get(),
mStsThread.get(),
static_cast<VideoSessionConduit*>(mConduit.get()),
mReceiveTrack);
}
nsresult
-TransceiverImpl::UpdateSinkIdentity(dom::MediaStreamTrack* aTrack,
+TransceiverImpl::UpdateSinkIdentity(const dom::MediaStreamTrack* aTrack,
nsIPrincipal* aPrincipal,
const PeerIdentity* aSinkIdentity)
{
if (mJsepTransceiver->IsStopped()) {
return NS_OK;
}
mTransmitPipeline->UpdateSinkIdentity_m(aTrack, aPrincipal, aSinkIdentity);
--- a/media/webrtc/signaling/src/peerconnection/TransceiverImpl.h
+++ b/media/webrtc/signaling/src/peerconnection/TransceiverImpl.h
@@ -60,17 +60,17 @@ public:
bool IsValid() const
{
return !!mConduit;
}
nsresult UpdateSendTrack(dom::MediaStreamTrack* aSendTrack);
- nsresult UpdateSinkIdentity(dom::MediaStreamTrack* aTrack,
+ nsresult UpdateSinkIdentity(const dom::MediaStreamTrack* aTrack,
nsIPrincipal* aPrincipal,
const PeerIdentity* aSinkIdentity);
nsresult UpdateTransport(PeerConnectionMedia& aTransportManager);
nsresult UpdateConduit();
nsresult UpdatePrincipal(nsIPrincipal* aPrincipal);