Bug 1248335: avoid using SvcInternal structure entirely, as system-vpx may not have it r?glandium,pkerr
author Randell Jesup <rjesup@jesup.org>
Thu, 18 Feb 2016 15:24:24 -0500
changeset 331928 cbeeeb2b2b8b68613d6b1fe5a0455e80af4326b2
parent 331913 81228410543e5249d7b0305b04007451a3df6f20
child 514502 5988691d578fd71828b4f25990911e82a49068e0
push id 11118
push user rjesup@wgate.com
push date Thu, 18 Feb 2016 20:24:47 +0000
reviewers glandium, pkerr
bugs 1248335
milestone 47.0a1
Bug 1248335: avoid using SvcInternal structure entirely, as system-vpx may not have it r?glandium,pkerr MozReview-Commit-ID: 146FTSGQ8Ck
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
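The patch applies one compile-time pattern throughout: every use of SvcInternal_t (declared in vpx/svc_context.h, which a system libvpx may not ship) is wrapped in #ifdef LIBVPX_SVC, and the non-SVC build falls back to a single spatial layer that receives the full target bitrate. A condensed sketch of that pattern follows, using names taken from the diff below; LIBVPX_SVC is assumed to be defined only for builds against the in-tree libvpx, and the SvcGuardSketch class is hypothetical, standing in for VP9EncoderImpl.

    // Condensed sketch, not the patch itself.
    #ifdef LIBVPX_SVC
    #include "vpx/svc_context.h"  // declares SvcInternal_t
    #endif
    #include "vpx/vpx_encoder.h"

    class SvcGuardSketch {  // hypothetical stand-in for VP9EncoderImpl
     public:
      // Mirrors the SetSvcRates() fallback: with no SvcInternal_t available,
      // only layer 0 is meaningful and it gets the whole bitrate.
      bool ComputeRateRatios(int num_spatial_layers, float* rate_ratio,
                             float* total) {
    #ifdef LIBVPX_SVC
        *total = 0;
        for (int i = 0; i < num_spatial_layers; ++i) {
          if (svc_internal_.svc_params.scaling_factor_num[i] <= 0 ||
              svc_internal_.svc_params.scaling_factor_den[i] <= 0)
            return false;  // scaling factors not specified
          rate_ratio[i] =
              static_cast<float>(svc_internal_.svc_params.scaling_factor_num[i]) /
              svc_internal_.svc_params.scaling_factor_den[i];
          *total += rate_ratio[i];
        }
    #else
        (void)num_spatial_layers;
        rate_ratio[0] = 1;
        *total = 1;
    #endif
        return true;
      }

     private:
    #ifdef LIBVPX_SVC
      SvcInternal_t svc_internal_;  // only declared when the header is available
    #endif
    };

The same guard brackets the VP9E_SET_SVC / VP9E_SET_SVC_PARAMETERS codec controls in InitAndSetControlSettings() and the per-layer resolution block in PopulateCodecSpecific(), so a system-libvpx build still compiles and encodes, just without spatial scalability.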
--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -148,27 +148,32 @@ bool VP9EncoderImpl::SetSvcRates() {
                            codec_.spatialLayers[i].target_bitrate_bps /
                            total_bitrate_bps);
     }
 #endif
   } else {
     float rate_ratio[VPX_MAX_LAYERS] = {0};
     float total = 0;
 
+#ifdef LIBVPX_SVC
     for (i = 0; i < num_spatial_layers_; ++i) {
       if (svc_internal_.svc_params.scaling_factor_num[i] <= 0 ||
           svc_internal_.svc_params.scaling_factor_den[i] <= 0) {
         LOG(LS_ERROR) << "Scaling factors not specified!";
         return false;
       }
       rate_ratio[i] =
           static_cast<float>(svc_internal_.svc_params.scaling_factor_num[i]) /
           svc_internal_.svc_params.scaling_factor_den[i];
       total += rate_ratio[i];
     }
+#else
+    rate_ratio[0] = 1;
+    total = 1;
+#endif
 
     for (i = 0; i < num_spatial_layers_; ++i) {
       config_->ss_target_bitrate[i] = static_cast<unsigned int>(
           config_->rc_target_bitrate * rate_ratio[i] / total);
       if (num_temporal_layers_ == 1) {
         config_->layer_target_bitrate[i] = config_->ss_target_bitrate[i];
       } else if (num_temporal_layers_ == 2) {
         config_->layer_target_bitrate[i * num_temporal_layers_] =
@@ -392,59 +397,62 @@ int VP9EncoderImpl::NumberOfThreads(int 
     // 1 thread less than VGA.
     return 1;
   }
 }
 
 int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
   config_->ss_number_layers = num_spatial_layers_;
 
+#ifdef LIBVPX_SVC
   if (ExplicitlyConfiguredSpatialLayers()) {
-#ifdef LIBVPX_SVC
     for (int i = 0; i < num_spatial_layers_; ++i) {
       const auto& layer = codec_.spatialLayers[i];
       svc_internal_.svc_params.max_quantizers[i] = config_->rc_max_quantizer;
       svc_internal_.svc_params.min_quantizers[i] = config_->rc_min_quantizer;
       svc_internal_.svc_params.scaling_factor_num[i] = layer.scaling_factor_num;
       svc_internal_.svc_params.scaling_factor_den[i] = layer.scaling_factor_den;
     }
-#endif
   } else {
     int scaling_factor_num = 256;
     for (int i = num_spatial_layers_ - 1; i >= 0; --i) {
       svc_internal_.svc_params.max_quantizers[i] = config_->rc_max_quantizer;
       svc_internal_.svc_params.min_quantizers[i] = config_->rc_min_quantizer;
       // 1:2 scaling in each dimension.
       svc_internal_.svc_params.scaling_factor_num[i] = scaling_factor_num;
       svc_internal_.svc_params.scaling_factor_den[i] = 256;
       if (codec_.mode != kScreensharing)
         scaling_factor_num /= 2;
     }
   }
+#endif
 
   if (!SetSvcRates()) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
 
   if (vpx_codec_enc_init(encoder_, vpx_codec_vp9_cx(), config_, 0)) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
   vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
   vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
                     rc_max_intra_target_);
   vpx_codec_control(encoder_, VP9E_SET_AQ_MODE,
                     inst->codecSpecific.VP9.adaptiveQpMode ? 3 : 0);
 
+#ifdef LIBVPX_SVC
   vpx_codec_control(
       encoder_, VP9E_SET_SVC,
       (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) ? 1 : 0);
   if (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) {
     vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS,
                       &svc_internal_.svc_params);
   }
+#endif
+
   // Register callback for getting each spatial layer.
   vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = {
       VP9EncoderImpl::EncoderOutputCodedPacketCallback, (void*)(this)};
   vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, (void*)(&cbp));
 
   // Control function to set the number of column tiles in encoding a frame, in
   // log2 unit: e.g., 0 = 1 tile column, 1 = 2 tile columns, 2 = 4 tile columns.
  // The number of tile columns will be capped by the encoder based on image size
@@ -640,30 +648,32 @@ void VP9EncoderImpl::PopulateCodecSpecif
       vp9_info->p_diff[i] = p_diff_[layer_id.spatial_layer_id][i];
     }
   } else {
     vp9_info->gof_idx =
         static_cast<uint8_t>(frames_since_kf_ % gof_.num_frames_in_gof);
     vp9_info->temporal_up_switch = gof_.temporal_up_switch[vp9_info->gof_idx];
   }
 
+#ifdef LIBVPX_SVC
   if (vp9_info->ss_data_available) {
     vp9_info->spatial_layer_resolution_present = true;
     for (size_t i = 0; i < vp9_info->num_spatial_layers; ++i) {
       vp9_info->width[i] = codec_.width *
                            svc_internal_.svc_params.scaling_factor_num[i] /
                            svc_internal_.svc_params.scaling_factor_den[i];
       vp9_info->height[i] = codec_.height *
                             svc_internal_.svc_params.scaling_factor_num[i] /
                             svc_internal_.svc_params.scaling_factor_den[i];
     }
     if (!vp9_info->flexible_mode) {
       vp9_info->gof.CopyGofInfoVP9(gof_);
     }
   }
+#endif
 }
 
 int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
   encoded_image_._length = 0;
   encoded_image_._frameType = kDeltaFrame;
   RTPFragmentationHeader frag_info;
   // Note: no data partitioning in VP9, so 1 partition only. We keep this
   // fragmentation data for now, until VP9 packetizer is implemented.
--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -10,17 +10,19 @@
  */
 
 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
 
 #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
 #include "webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
 
+#ifdef LIBVPX_SVC
 #include "vpx/svc_context.h"
+#endif
 #include "vpx/vpx_decoder.h"
 #include "vpx/vpx_encoder.h"
 
 namespace webrtc {
 
 class ScreenshareLayersVP9;
 
 class VP9EncoderImpl : public VP9Encoder {
@@ -104,17 +106,19 @@ class VP9EncoderImpl : public VP9Encoder
   bool inited_;
   int64_t timestamp_;
   uint16_t picture_id_;
   int cpu_speed_;
   uint32_t rc_max_intra_target_;
   vpx_codec_ctx_t* encoder_;
   vpx_codec_enc_cfg_t* config_;
   vpx_image_t* raw_;
+#ifdef LIBVPX_SVC
   SvcInternal_t svc_internal_;
+#endif
   const I420VideoFrame* input_image_;
   GofInfoVP9 gof_;       // Contains each frame's temporal information for
                          // non-flexible mode.
   uint8_t tl0_pic_idx_;  // Only used in non-flexible mode.
   size_t frames_since_kf_;
   uint8_t num_temporal_layers_;
   uint8_t num_spatial_layers_;