Bug 1355048: P5. Remove RenderVideoFrame virtual method. r?jesup
The overload that takes raw Y/U/V plane pointers and strides isn't used;
remove it and amend the documentation of the remaining
RenderVideoFrame(const webrtc::VideoFrameBuffer&, ...) overload accordingly.
MozReview-Commit-ID: Bg8rqzL034R
old mode 100644
new mode 100644
--- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
@@ -125,39 +125,31 @@ public:
* @param height: current height of the video @ decoder
* @param number_of_streams: number of participating video streams
*/
virtual void FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int number_of_streams) = 0;
/**
- * Callback Function reporting decoded I420 frame for processing.
- * @param buffer: pointer to decoded video frame
+ * Callback Function reporting decoded frame for processing.
+ * @param buffer: reference to decoded video frame
* @param buffer_size: size of the decoded frame
* @param time_stamp: Decoder timestamp, typically 90KHz as per RTP
* @render_time: Wall-clock time at the decoder for synchronization
* purposes in milliseconds
* NOTE: If decoded video frame is passed through buffer , it is the
* responsibility of the concrete implementations of this class to own copy
* of the frame if needed for time longer than scope of this callback.
* Such implementations should be quick in processing the frames and return
* immediately.
*/
virtual void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
uint32_t time_stamp,
int64_t render_time) = 0;
- virtual void RenderVideoFrame(const uint8_t* buffer_y,
- uint32_t y_stride,
- const uint8_t* buffer_u,
- uint32_t u_stride,
- const uint8_t* buffer_v,
- uint32_t v_stride,
- uint32_t time_stamp,
- int64_t render_time) = 0;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoRenderer)
};
/**
* Generic Interface for representing Audio/Video Session
* MediaSession conduit is identified by 2 main components
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -2274,31 +2274,16 @@ public:
void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
uint32_t time_stamp,
int64_t render_time) override
{
pipeline_->listener_->RenderVideoFrame(buffer, time_stamp, render_time);
}
- void RenderVideoFrame(const uint8_t* buffer_y,
- uint32_t y_stride,
- const uint8_t* buffer_u,
- uint32_t u_stride,
- const uint8_t* buffer_v,
- uint32_t v_stride,
- uint32_t time_stamp,
- int64_t render_time) override
- {
- pipeline_->listener_->RenderVideoFrame(buffer_y, y_stride,
- buffer_u, u_stride,
- buffer_v, v_stride,
- time_stamp, render_time);
- }
-
private:
MediaPipelineReceiveVideo *pipeline_; // Raw pointer to avoid cycles
};
MediaPipelineReceiveVideo::MediaPipelineReceiveVideo(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,