Bug 1240420. Part 2 - fix warnings of signed/unsigned comparison. r=kinetik.
author JW Wang <jwwang@mozilla.com>
Thu, 21 Jan 2016 21:11:14 +0800
changeset 323904 190c367badca5b8dedec860c0c7a3a6ddb3dcf7f
parent 323903 b37eb07ba6f5b29fa186ce8974e6fb7c6b4a8196
child 324276 c01d0355c486f152fc449e861d590057672cf1eb
push id 9800
push user jwwang@mozilla.com
push date Thu, 21 Jan 2016 13:14:03 +0000
reviewers kinetik
bugs 1240420
milestone 46.0a1
dom/media/AudioStream.cpp
dom/media/AudioStream.h
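
The warning class being fixed typically arises when a signed rate or channel count is compared against an unsigned quantity such as a frame count. A minimal standalone sketch of the pattern (illustrative only, not code from the tree; the variable names are made up):

  // Build with: g++ -Wall -Wsign-compare sketch.cpp
  #include <cstdint>
  #include <cstdio>

  int main() {
    uint32_t frames = 1024;   // frame counts are naturally unsigned
    int signedRate = 44100;   // how AudioStream previously stored rates

    // warning: comparison of integers of different signedness --
    // signedRate is implicitly converted to unsigned before comparing.
    if (frames < signedRate) {
      std::puts("fewer frames than the rate");
    }

    uint32_t unsignedRate = 44100;  // what this patch switches to
    if (frames < unsignedRate) {    // same signedness: no warning
      std::puts("fewer frames than the rate");
    }
    return 0;
  }
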
--- a/dom/media/AudioStream.cpp
+++ b/dom/media/AudioStream.cpp
@@ -42,28 +42,28 @@ static int gDumpedAudioCount = 0;
  * with the playback rate at the moment. Since the playback rate and number of
  * underrun frames can vary in each callback, we need to keep the whole history
  * in order to calculate the playback position of the audio engine correctly.
  */
 class FrameHistory {
   struct Chunk {
     uint32_t servicedFrames;
     uint32_t totalFrames;
-    int rate;
+    uint32_t rate;
   };
 
   template <typename T>
   static T FramesToUs(uint32_t frames, int rate) {
     return static_cast<T>(frames) * USECS_PER_S / rate;
   }
 public:
   FrameHistory()
     : mBaseOffset(0), mBasePosition(0) {}
 
-  void Append(uint32_t aServiced, uint32_t aUnderrun, int aRate) {
+  void Append(uint32_t aServiced, uint32_t aUnderrun, uint32_t aRate) {
     /* In the most common case, where the playback rate stays the same and we
      * don't underrun frames, we merge chunks to avoid the loss of precision
      * that would accumulate from compressing them into |mBaseOffset| and |mBasePosition|.
      */
     if (!mChunks.IsEmpty()) {
       Chunk& c = mChunks.LastElement();
       // 2 chunks (c1 and c2) can be merged when rate is the same and
       // adjacent frames are zero. That is, underrun frames in c1 are zero
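
To make the bookkeeping described in the comment above concrete, here is a simplified standalone sketch (not the actual FrameHistory code, which additionally folds completed chunks into mBaseOffset/mBasePosition): each chunk records how many frames were serviced at a particular rate, and a position is obtained by converting each chunk's frames to time at that chunk's own rate, mirroring FramesToUs above.

  #include <cstdint>
  #include <vector>

  // Simplified stand-in for FrameHistory's per-callback record.
  struct Chunk {
    uint32_t servicedFrames;  // frames actually played
    uint32_t totalFrames;     // serviced + underrun frames
    uint32_t rate;            // Hz; unsigned, matching this patch
  };

  static const int64_t USECS_PER_S = 1000000;

  // Position in microseconds: each chunk contributes its serviced frames
  // converted at the rate that was in effect for that chunk, so the sum
  // stays correct even when the rate changes between callbacks.
  int64_t PositionUs(const std::vector<Chunk>& aChunks) {
    int64_t position = 0;
    for (const Chunk& c : aChunks) {
      position += static_cast<int64_t>(c.servicedFrames) * USECS_PER_S / c.rate;
    }
    return position;
  }
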
@@ -294,23 +294,23 @@ WriteDumpFile(FILE* aDumpFile, AudioStre
   for (uint32_t i = 0; i < samples; ++i) {
     SetUint16LE(output + i*2, int16_t(input[i]*32767.0f));
   }
   fwrite(output, 2, samples, aDumpFile);
   fflush(aDumpFile);
 }
 
 nsresult
-AudioStream::Init(int32_t aNumChannels, int32_t aRate,
+AudioStream::Init(uint32_t aNumChannels, uint32_t aRate,
                   const dom::AudioChannel aAudioChannel)
 {
   mStartTime = TimeStamp::Now();
   mIsFirst = CubebUtils::GetFirstStream();
 
-  if (!CubebUtils::GetCubebContext() || aNumChannels < 0 || aRate < 0) {
+  if (!CubebUtils::GetCubebContext()) {
     return NS_ERROR_FAILURE;
   }
 
   MOZ_LOG(gAudioStreamLog, LogLevel::Debug,
     ("%s  channels: %d, rate: %d for %p", __FUNCTION__, aNumChannels, aRate, this));
   mInRate = mOutRate = aRate;
   mChannels = aNumChannels;
   mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels;
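
A side note on the check dropped from Init() above: once aNumChannels and aRate are unsigned, a `< 0` test can never be true (and most compilers warn that such a comparison is tautological), which is presumably why the patch removes it rather than keeping it. A small illustrative sketch, using a made-up helper name:

  #include <cstdint>
  #include <cstdio>

  // Hypothetical helper, not from the tree.
  bool IsPlausibleRate(uint32_t aRate) {
    // return aRate < 0;   // always false for unsigned -- the dropped check
    return aRate > 0;      // a check that still means something when unsigned
  }

  int main() {
    std::printf("%d %d\n", IsPlausibleRate(44100), IsPlausibleRate(0));
    return 0;
  }
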
@@ -722,17 +722,17 @@ int64_t AudioClock::GetPositionUnlocked(
 
 int64_t AudioClock::GetPositionInFrames() const
 {
   return (GetPositionUnlocked() * mInRate) / USECS_PER_S;
 }
 
 void AudioClock::SetPlaybackRateUnlocked(double aPlaybackRate)
 {
-  mOutRate = static_cast<int>(mInRate / aPlaybackRate);
+  mOutRate = static_cast<uint32_t>(mInRate / aPlaybackRate);
 }
 
 double AudioClock::GetPlaybackRate() const
 {
   return static_cast<double>(mInRate) / mOutRate;
 }
 
 void AudioClock::SetPreservesPitch(bool aPreservesPitch)
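
The two AudioClock methods above encode the relationship between the stored rates: SetPlaybackRateUnlocked derives mOutRate as mInRate / aPlaybackRate, and GetPlaybackRate recovers the ratio from the two stored values. A worked example with hypothetical numbers (the integer truncation in the cast makes the round trip approximate in general):

  #include <cstdint>
  #include <cstdio>

  int main() {
    uint32_t inRate = 44100;     // sample rate of the media
    double playbackRate = 1.25;  // requested playback speed

    // SetPlaybackRateUnlocked: mOutRate = mInRate / aPlaybackRate
    uint32_t outRate = static_cast<uint32_t>(inRate / playbackRate);  // 35280

    // GetPlaybackRate: mInRate / mOutRate
    double recovered = static_cast<double>(inRate) / outRate;         // 1.25

    std::printf("outRate=%u recovered=%.3f\n",
                static_cast<unsigned>(outRate), recovered);
    return 0;
  }
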
--- a/dom/media/AudioStream.h
+++ b/dom/media/AudioStream.h
@@ -62,19 +62,19 @@ public:
   // Get the current pitch preservation state.
   // Called on the audio thread.
   bool GetPreservesPitch() const;
 private:
   // This AudioStream holds a strong reference to this AudioClock. This
   // pointer is guaranteed to always be valid.
   AudioStream* const mAudioStream;
   // Output rate in Hz (characteristic of the playback rate)
-  int mOutRate;
+  uint32_t mOutRate;
   // Input rate in Hz (characteristic of the media being played)
-  int mInRate;
+  uint32_t mInRate;
   // True if we are timestretching, false if we are resampling.
   bool mPreservesPitch;
   // The history of frames sent to the audio engine in each DataCallback.
   const nsAutoPtr<FrameHistory> mFrameHistory;
 };
 
 class CircularByteBuffer
 {
@@ -247,17 +247,17 @@ public:
     virtual ~DataSource() {}
   };
 
   explicit AudioStream(DataSource& aSource);
 
   // Initialize the audio stream. aNumChannels is the number of audio
   // channels (1 for mono, 2 for stereo, etc) and aRate is the sample rate
   // (22050Hz, 44100Hz, etc).
-  nsresult Init(int32_t aNumChannels, int32_t aRate,
+  nsresult Init(uint32_t aNumChannels, uint32_t aRate,
                 const dom::AudioChannel aAudioStreamChannel);
 
   // Closes the stream. All future use of the stream is an error.
   void Shutdown();
 
   void Reset();
 
   // Set the current volume of the audio playback. This is a value from
@@ -279,19 +279,19 @@ public:
 
   // Return the position, measured in audio frames played since the stream
   // was opened, of the audio hardware.  Thread-safe.
   int64_t GetPositionInFrames();
 
   // Returns true when the audio stream is paused.
   bool IsPaused();
 
-  int GetRate() { return mOutRate; }
-  int GetChannels() { return mChannels; }
-  int GetOutChannels() { return mOutChannels; }
+  uint32_t GetRate() { return mOutRate; }
+  uint32_t GetChannels() { return mChannels; }
+  uint32_t GetOutChannels() { return mOutChannels; }
 
   // Set playback rate as a multiple of the intrinsic playback rate. This is to
   // be called only with aPlaybackRate > 0.0.
   nsresult SetPlaybackRate(double aPlaybackRate);
   // Switch between resampling (if false) and time stretching (if true, default).
   nsresult SetPreservesPitch(bool aPreservesPitch);
 
   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;
@@ -331,21 +331,21 @@ private:
   void GetTimeStretched(AudioBufferWriter& aWriter);
 
   void StartUnlocked();
 
   // The monitor is held to protect all access to member variables.
   Monitor mMonitor;
 
   // Input rate in Hz (characteristic of the media being played)
-  int mInRate;
+  uint32_t mInRate;
   // Output rate in Hz (characteristic of the playback rate)
-  int mOutRate;
-  int mChannels;
-  int mOutChannels;
+  uint32_t mOutRate;
+  uint32_t mChannels;
+  uint32_t mOutChannels;
 #if defined(__ANDROID__)
   dom::AudioChannel mAudioChannel;
 #endif
   AudioClock mAudioClock;
   soundtouch::SoundTouch* mTimeStretcher;
 
   // Stream start time for stream open delay telemetry.
   TimeStamp mStartTime;