Bug 1355048: P3. Remove ImageHandle. r?jesup
This object is unused; we can pass this information through the NativeHandle interface instead.
MozReview-Commit-ID: ApMeQfJtZNJ
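For reference, a minimal sketch of a concrete renderer under the new
signature (ExampleRenderer is hypothetical; native_handle() is assumed to be
the webrtc::VideoFrameBuffer accessor behind the NativeHandle interface
mentioned above):

    // Hypothetical renderer illustrating the updated contract: any opaque
    // image now travels inside the webrtc::VideoFrameBuffer itself, so the
    // separate ImageHandle parameter is no longer needed.
    class ExampleRenderer : public VideoRenderer
    {
    public:
      void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
                            uint32_t time_stamp,
                            int64_t render_time) override
      {
        if (buffer.native_handle()) {
          // Texture/platform frame: the opaque handle is carried by the
          // buffer, where ImageHandle used to pass it alongside the frame.
          return;
        }
        // Raw I420 frame: the buffer is only valid for the scope of this
        // call, so copy the planes (DataY()/StrideY(), etc.) if they are
        // needed later.
      }
      // (Remaining VideoRenderer overrides omitted for brevity.)
    };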
old mode 100755
new mode 100644
--- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
@@ -101,31 +101,16 @@ public:
* @param len: Length of the RTCP packet
* @result: NS_OK on success, NS_ERROR_FAILURE otherwise
*/
virtual nsresult SendRtcpPacket(const uint8_t* data, size_t len) = 0;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TransportInterface)
};
/**
- * This class wraps image object for VideoRenderer::RenderVideoFrame()
- * callback implementation to use for rendering.
- */
-class ImageHandle
-{
-public:
- explicit ImageHandle(layers::Image* image) : mImage(image) {}
-
- const RefPtr<layers::Image>& GetImage() const { return mImage; }
-
-private:
- RefPtr<layers::Image> mImage;
-};
-
-/**
* 1. Abstract renderer for video data
* 2. This class acts as an abstract interface between the video engine and
* the video-engine-agnostic renderer implementation.
* 3. Concrete implementations of this interface are responsible for
* processing and/or rendering the obtained raw video frame to an
* appropriate output, say, <video>
*/
class VideoRenderer
@@ -146,39 +131,33 @@ public:
/**
* Callback function reporting a decoded I420 frame for processing.
* @param buffer: pointer to decoded video frame
* @param buffer_size: size of the decoded frame
* @param time_stamp: decoder timestamp, typically 90 kHz as per RTP
* @param render_time: wall-clock time at the decoder for synchronization
* purposes, in milliseconds
- * @handle: opaque handle for image object of decoded video frame.
* NOTE: If the decoded video frame is passed through buffer, it is the
* responsibility of the concrete implementations of this class to own a copy
* of the frame if it is needed for longer than the scope of this callback.
* Such implementations should be quick in processing the frames and return
* immediately.
- * On the other hand, if decoded video frame is passed through handle, the
- * implementations should keep a reference to the (ref-counted) image object
- * inside until it's no longer needed.
*/
virtual void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
uint32_t time_stamp,
- int64_t render_time,
- const ImageHandle& handle) = 0;
+ int64_t render_time) = 0;
virtual void RenderVideoFrame(const uint8_t* buffer_y,
uint32_t y_stride,
const uint8_t* buffer_u,
uint32_t u_stride,
const uint8_t* buffer_v,
uint32_t v_stride,
uint32_t time_stamp,
- int64_t render_time,
- const ImageHandle& handle) = 0;
+ int64_t render_time) = 0;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoRenderer)
};
/**
* Generic Interface for representing Audio/Video Session
* MediaSession conduit is identified by 2 main components
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -2313,21 +2313,19 @@ WebrtcVideoConduit::OnFrame(const webrtc
const_cast<unsigned char*>(video_frame.video_frame_buffer()->DataY()),
reinterpret_cast<unsigned char*>(&timestamp),
sizeof(timestamp), 0, 0);
if (ok) {
VideoLatencyUpdate(now - timestamp);
}
}
- const ImageHandle img_handle(nullptr);
mRenderer->RenderVideoFrame(*video_frame.video_frame_buffer(),
video_frame.timestamp(),
- video_frame.render_time_ms(),
- img_handle);
+ video_frame.render_time_ms());
}
// Compare lists of codecs
bool
WebrtcVideoConduit::CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
const nsTArray<UniquePtr<VideoCodecConfig>>& b)
{
// return a != b;
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -2192,37 +2192,35 @@ public:
ReentrantMonitorAutoEnter enter(monitor_);
width_ = width;
height_ = height;
}
void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
uint32_t time_stamp,
- int64_t render_time,
- const RefPtr<layers::Image>& video_image)
+ int64_t render_time)
{
RenderVideoFrame(buffer.DataY(),
buffer.StrideY(),
buffer.DataU(),
buffer.StrideU(),
buffer.DataV(),
buffer.StrideV(),
- time_stamp, render_time, video_image);
+ time_stamp, render_time);
}
void RenderVideoFrame(const uint8_t* buffer_y,
uint32_t y_stride,
const uint8_t* buffer_u,
uint32_t u_stride,
const uint8_t* buffer_v,
uint32_t v_stride,
uint32_t time_stamp,
- int64_t render_time,
- const RefPtr<layers::Image>& video_image)
+ int64_t render_time)
{
ReentrantMonitorAutoEnter enter(monitor_);
if (buffer_y) {
// Create a video frame using |buffer|.
RefPtr<PlanarYCbCrImage> yuvImage = image_container_->CreatePlanarYCbCrImage();
PlanarYCbCrData yuvData;
@@ -2272,39 +2270,34 @@ public:
unsigned int height,
unsigned int number_of_streams) override
{
pipeline_->listener_->FrameSizeChange(width, height, number_of_streams);
}
void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
uint32_t time_stamp,
- int64_t render_time,
- const ImageHandle& handle) override
+ int64_t render_time) override
{
- pipeline_->listener_->RenderVideoFrame(buffer,
- time_stamp, render_time,
- handle.GetImage());
+ pipeline_->listener_->RenderVideoFrame(buffer, time_stamp, render_time);
}
void RenderVideoFrame(const uint8_t* buffer_y,
uint32_t y_stride,
const uint8_t* buffer_u,
uint32_t u_stride,
const uint8_t* buffer_v,
uint32_t v_stride,
uint32_t time_stamp,
- int64_t render_time,
- const ImageHandle& handle) override
+ int64_t render_time) override
{
pipeline_->listener_->RenderVideoFrame(buffer_y, y_stride,
buffer_u, u_stride,
buffer_v, v_stride,
- time_stamp, render_time,
- handle.GetImage());
+ time_stamp, render_time);
}
private:
MediaPipelineReceiveVideo *pipeline_; // Raw pointer to avoid cycles
};
MediaPipelineReceiveVideo::MediaPipelineReceiveVideo(