Bug 1440255 - Consolidate failure paths. r?dminor
The cameraThread is set by startCapture(), so a failed startCapture() that
quits the Looper and runs the cameraThread to completion needs to set
cameraThread back to null for consistency.
Likewise, stopCapture() must always quit the Looper and set cameraThread to
null.
MozReview-Commit-ID: H1ExLyTixYw
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -120,16 +120,29 @@ public class VideoCaptureAndroid impleme
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
boolean startResult =
startCaptureOnCameraThread(width, height, min_mfps, max_mfps);
exchange(result, startResult);
}
});
boolean startResult = exchange(result, false); // |false| is a dummy value.
+
+ if (!startResult) {
+ // Starting failed on the camera thread. The looper has now quit and the
+ // camera thread is dead.
+ try {
+ cameraThread.join();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ cameraThreadHandler = null;
+ cameraThread = null;
+ }
+
return startResult;
}
@WebRTCJNITarget
private void unlinkCapturer() {
// stopCapture might fail. That might leave the callbacks dangling, so make
// sure those don't call into dead code.
// Note that onPreviewCameraFrame isn't synchronized, so there's no point in
@@ -260,18 +273,17 @@ public class VideoCaptureAndroid impleme
if (range[Parameters.PREVIEW_FPS_MIN_INDEX] % min_mfps == 0 &&
range[Parameters.PREVIEW_FPS_MAX_INDEX] % max_mfps == 0) {
int dropRatio = range[Parameters.PREVIEW_FPS_MAX_INDEX] / max_mfps;
frameDropRatio = Math.min(dropRatio, frameDropRatio);
}
}
if (frameDropRatio == Integer.MAX_VALUE) {
Log.e(TAG, "Can not find camera fps range");
- error = new RuntimeException("Can not find camera fps range");
- return false;
+ throw new RuntimeException("Can not find camera fps range");
}
if (frameDropRatio > 1) {
Log.d(TAG, "Frame dropper is enabled. Ratio: " + frameDropRatio);
}
min_mfps *= frameDropRatio;
max_mfps *= frameDropRatio;
Log.d(TAG, "Camera preview mfps range: " + min_mfps + " - " + max_mfps);
parameters.setPreviewFpsRange(min_mfps, max_mfps);
@@ -287,19 +299,17 @@ public class VideoCaptureAndroid impleme
frameCount = 0;
averageDurationMs = 1000000.0f / (max_mfps / frameDropRatio);
camera.startPreview();
return true;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "startCapture failed", error);
- if (camera != null) {
- stopCaptureOnCameraThread();
- }
+ stopCaptureOnCameraThread();
return false;
}
// Called by native code. Returns true when camera is known to be stopped.
@WebRTCJNITarget
private synchronized boolean stopCapture() {
Log.d(TAG, "stopCapture");
final Exchanger<Boolean> result = new Exchanger<Boolean>();
@@ -317,22 +327,22 @@ public class VideoCaptureAndroid impleme
}
cameraThreadHandler = null;
cameraThread = null;
Log.d(TAG, "stopCapture done");
return status;
}
private boolean stopCaptureOnCameraThread() {
- if (camera == null) {
- Log.e(TAG, "Camera is already stopped!");
- return true;
- }
Throwable error = null;
try {
+ if (camera == null) {
+ Log.e(TAG, "Camera is already stopped!");
+ throw new RuntimeException("Camera is already stopped!");
+ }
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
camera.setPreviewTexture(null);
cameraSurfaceTexture = null;
if (cameraGlTextures != null) {
GLES20.glDeleteTextures(1, cameraGlTextures, 0);
cameraGlTextures = null;
}