--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -115,35 +115,36 @@ public class VideoCaptureAndroid impleme
Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
final Exchanger<Boolean> result = new Exchanger<Boolean>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
- startCaptureOnCameraThread(width, height, min_mfps, max_mfps, result);
+ boolean startResult =
+ startCaptureOnCameraThread(width, height, min_mfps, max_mfps);
+ exchange(result, startResult);
}
});
boolean startResult = exchange(result, false); // |false| is a dummy value.
return startResult;
}
@WebRTCJNITarget
private void unlinkCapturer() {
// stopCapture might fail. That might leave the callbacks dangling, so make
// sure those don't call into dead code.
// Note that onPreviewCameraFrame isn't synchronized, so there's no point in
// synchronizing us either. ProvideCameraFrame has to do the null check.
native_capturer = 0;
}
- private void startCaptureOnCameraThread(
- int width, int height, int min_mfps, int max_mfps,
- Exchanger<Boolean> result) {
+ private boolean startCaptureOnCameraThread(
+ int width, int height, int min_mfps, int max_mfps) {
Throwable error = null;
try {
camera = Camera.open(id);
if (localPreview != null) {
localPreview.addCallback(this);
if (localPreview.getSurface() != null &&
localPreview.getSurface().isValid()) {
@@ -239,18 +240,17 @@ public class VideoCaptureAndroid impleme
range[Parameters.PREVIEW_FPS_MAX_INDEX] % max_mfps == 0) {
int dropRatio = range[Parameters.PREVIEW_FPS_MAX_INDEX] / max_mfps;
frameDropRatio = Math.min(dropRatio, frameDropRatio);
}
}
if (frameDropRatio == Integer.MAX_VALUE) {
Log.e(TAG, "Can not find camera fps range");
error = new RuntimeException("Can not find camera fps range");
- exchange(result, false);
- return;
+ return false;
}
if (frameDropRatio > 1) {
Log.d(TAG, "Frame dropper is enabled. Ratio: " + frameDropRatio);
}
min_mfps *= frameDropRatio;
max_mfps *= frameDropRatio;
Log.d(TAG, "Camera preview mfps range: " + min_mfps + " - " + max_mfps);
parameters.setPreviewFpsRange(min_mfps, max_mfps);
@@ -261,83 +261,77 @@ public class VideoCaptureAndroid impleme
int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
for (int i = 0; i < numCaptureBuffers; i++) {
camera.addCallbackBuffer(new byte[bufSize]);
}
camera.setPreviewCallbackWithBuffer(this);
frameCount = 0;
averageDurationMs = 1000000.0f / (max_mfps / frameDropRatio);
camera.startPreview();
- exchange(result, true);
- return;
+ return true;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "startCapture failed", error);
if (camera != null) {
- Exchanger<Boolean> resultDropper = new Exchanger<Boolean>();
- stopCaptureOnCameraThread(resultDropper);
- exchange(resultDropper, false);
+ stopCaptureOnCameraThread();
}
- exchange(result, false);
- return;
+ return false;
}
// Called by native code. Returns true when camera is known to be stopped.
@WebRTCJNITarget
private synchronized boolean stopCapture() {
Log.d(TAG, "stopCapture");
final Exchanger<Boolean> result = new Exchanger<Boolean>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
- stopCaptureOnCameraThread(result);
+ boolean stopResult = stopCaptureOnCameraThread();
+ exchange(result, stopResult);
}
});
boolean status = exchange(result, false); // |false| is a dummy value here.
try {
cameraThread.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
cameraThreadHandler = null;
cameraThread = null;
Log.d(TAG, "stopCapture done");
return status;
}
- private void stopCaptureOnCameraThread(
- Exchanger<Boolean> result) {
+ private boolean stopCaptureOnCameraThread() {
if (camera == null) {
Log.e(TAG, "Camera is already stopped!");
-      return;
+      Looper.myLooper().quit(); return true; // quit looper so join() in stopCapture() completes
}
Throwable error = null;
try {
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
camera.setPreviewTexture(null);
cameraSurfaceTexture = null;
if (cameraGlTextures != null) {
GLES20.glDeleteTextures(1, cameraGlTextures, 0);
cameraGlTextures = null;
}
camera.release();
camera = null;
- exchange(result, true);
Looper.myLooper().quit();
- return;
+ return true;
} catch (IOException e) {
error = e;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "Failed to stop camera", error);
- exchange(result, false);
Looper.myLooper().quit();
- return;
+ return false;
}
@WebRTCJNITarget
private int getDeviceOrientation() {
int orientation = 0;
if (context != null) {
WindowManager wm = (WindowManager) context.getSystemService(
Context.WINDOW_SERVICE);