Commit 1e4c039d authored by Erwan Croze

Fix javadoc

parent c3a57c08
@@ -28,21 +28,21 @@ import org.linphone.mediastream.video.capture.hwconf.AndroidCameraConfiguration.
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.view.SurfaceView;
/**
* Wrapper for Android Camera API. Used by Mediastreamer to record
* video from webcam.
- * This file depends only on Android SDK >= 5
+ * This file depends only on Android SDK superior or equal to 5
*/
public class AndroidVideoApi5JniWrapper {
public static boolean isRecording = false;
public static native void putImage(long nativePtr, byte[] buffer);
static public int detectCameras(int[] indexes, int[] frontFacing, int[] orientation) {
Log.d("detectCameras\n");
AndroidCamera[] cameras = AndroidCameraConfiguration.retrieveCameras();
int nextIndex = 0;
for (AndroidCamera androidCamera : cameras) {
if (nextIndex == 2) {
@@ -57,7 +57,7 @@ public class AndroidVideoApi5JniWrapper {
}
return nextIndex;
}
/**
* Return the hw-available resolution best matching the requested one.
* Best matching meaning:
@@ -71,23 +71,23 @@ public class AndroidVideoApi5JniWrapper {
*/
static public int[] selectNearestResolutionAvailable(int cameraId, int requestedW, int requestedH) {
Log.d("mediastreamer", "selectNearestResolutionAvailable: " + cameraId + ", " + requestedW + "x" + requestedH);
return selectNearestResolutionAvailableForCamera(cameraId, requestedW, requestedH);
}
static public void activateAutoFocus(Object cam) {
Log.d("mediastreamer", "Turning on autofocus on camera " + cam);
Camera camera = (Camera) cam;
if (camera != null && (camera.getParameters().getFocusMode() == Parameters.FOCUS_MODE_AUTO || camera.getParameters().getFocusMode() == Parameters.FOCUS_MODE_MACRO))
camera.autoFocus(null); // We don't need to do anything after the focus finished, so we don't need a callback
}
public static Object startRecording(int cameraId, int width, int height, int fps, int rotation, final long nativePtr) {
Log.d("mediastreamer", "startRecording(" + cameraId + ", " + width + ", " + height + ", " + fps + ", " + rotation + ", " + nativePtr + ")");
Camera camera = Camera.open();
applyCameraParameters(camera, width, height, fps);
camera.setPreviewCallback(new Camera.PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
if (isRecording) {
@@ -95,28 +95,28 @@ public class AndroidVideoApi5JniWrapper {
putImage(nativePtr, data);
}
}
});
camera.startPreview();
isRecording = true;
Log.d("mediastreamer", "Returning camera object: " + camera);
return camera;
}
public static void stopRecording(Object cam) {
isRecording = false;
Log.d("mediastreamer", "stopRecording(" + cam + ")");
Log.d("mediastreamer", "stopRecording(" + cam + ")");
Camera camera = (Camera) cam;
if (camera != null) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
} else {
Log.i("mediastreamer", "Cannot stop recording ('camera' is null)");
}
}
public static void setPreviewDisplaySurface(Object cam, Object surf) {
Log.d("mediastreamer", "setPreviewDisplaySurface(" + cam + ", " + surf + ")");
Camera camera = (Camera) cam;
@@ -124,7 +124,7 @@ public class AndroidVideoApi5JniWrapper {
try {
camera.setPreviewDisplay(surface.getHolder());
} catch (Exception exc) {
exc.printStackTrace();
}
}
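The three capture entry points above are meant to be driven together: startRecording() opens and configures the camera and begins pushing preview frames to the native putImage() call, setPreviewDisplaySurface() attaches a UI surface, and stopRecording() tears everything down. A minimal caller sketch, assuming a hypothetical CaptureSketch helper and a placeholder nativePtr (in the real stack the native mediastreamer2 code invokes these static methods through JNI and supplies that pointer):

import android.view.SurfaceView;

// Illustrative only: the class name, call order and constants below are
// assumptions for this sketch, not part of the commit.
public class CaptureSketch {
    public static Object start(SurfaceView preview, long nativePtr) {
        // camera 0, 640x480 preview at 15 fps, no rotation; nativePtr is
        // forwarded to putImage() for every frame while isRecording is true
        Object cam = AndroidVideoApi5JniWrapper.startRecording(0, 640, 480, 15, 0, nativePtr);
        // the Camera API permits setting the preview surface after
        // startPreview() when none was set beforehand
        AndroidVideoApi5JniWrapper.setPreviewDisplaySurface(cam, preview);
        return cam;
    }

    public static void stop(Object cam) {
        AndroidVideoApi5JniWrapper.stopRecording(cam);
    }
}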
//select nearest resolution equal or above requested, if none, return highest resolution from the supported list
@@ -135,7 +135,7 @@ public class AndroidVideoApi5JniWrapper {
requestedH = requestedW;
requestedW = t;
}
AndroidCamera[] cameras = AndroidCameraConfiguration.retrieveCameras();
List<AndroidCamera.Size> supportedSizes = null;
for(AndroidCamera c: cameras) {
@@ -151,11 +151,11 @@ public class AndroidVideoApi5JniWrapper {
Log.d("mediastreamer", "\t" + s.width + "x" + s.height);
}
int r[] = null;
int rW = Math.max(requestedW, requestedH);
int rH = Math.min(requestedW, requestedH);
try {
// look for nearest size
AndroidCamera.Size result = supportedSizes.get(0); /*by default return first value*/
int req = rW * rH;
@@ -168,7 +168,7 @@ public class AndroidVideoApi5JniWrapper {
result = s;
useDownscale = 0;
}
/* MS2 has a NEON downscaler, so we test this too */
int downScaleDist = /*Math.abs*/-1*(req - s.width * s.height / 4);
if (((s.width/2 >= rW && s.height/2 >= rH) || (s.width/2 >= rH && s.height/2 >= rW)) && downScaleDist < minDist) {
@@ -193,14 +193,14 @@ public class AndroidVideoApi5JniWrapper {
} catch (Exception exc) {
Log.e(exc,"mediastreamer", " resolution selection failed");
return null;
}
}
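The selection rule documented earlier (nearest supported size at or above the request, otherwise fall back to the first entry of the supported list) can be shown on its own. A simplified sketch of that rule only, ignoring the NEON half-size downscale branch; plain width/height pairs stand in for AndroidCamera.Size, and this is not the file's actual implementation:

import java.util.List;

// Picks the supported size closest in pixel count to the request among those
// that cover it (in either orientation); falls back to the first entry.
final class ResolutionPick {
    static int[] pick(List<int[]> supported, int reqW, int reqH) {
        int rW = Math.max(reqW, reqH);
        int rH = Math.min(reqW, reqH);
        int req = rW * rH;
        int[] best = supported.get(0);
        int minDist = Integer.MAX_VALUE;
        for (int[] s : supported) {
            boolean covers = (s[0] >= rW && s[1] >= rH) || (s[0] >= rH && s[1] >= rW);
            int dist = Math.abs(req - s[0] * s[1]);
            if (covers && dist < minDist) {
                minDist = dist;
                best = s;
            }
        }
        return best;
    }
}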
protected static void applyCameraParameters(Camera camera, int width, int height, int requestedFps) {
Parameters params = camera.getParameters();
params.setPreviewSize(width, height);
List<Integer> supported = params.getSupportedPreviewFrameRates();
if (supported != null) {
int nearest = Integer.MAX_VALUE;
@@ -213,7 +213,7 @@ public class AndroidVideoApi5JniWrapper {
}
Log.d("mediastreamer", "Preview framerate set:" + params.getPreviewFrameRate());
}
camera.setParameters(params);
}
}
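applyCameraParameters() resolves the requested frame rate against getSupportedPreviewFrameRates(); the body of that loop is collapsed in the hunk above, but the idea is a nearest-value search. A hedged sketch of the idea, not the file's exact code:

import java.util.Arrays;
import java.util.List;

// Picks the supported preview frame rate closest to the requested one.
final class FpsPick {
    static int nearest(List<Integer> supported, int requestedFps) {
        int best = supported.get(0);
        for (int fps : supported) {
            if (Math.abs(fps - requestedFps) < Math.abs(best - requestedFps)) {
                best = fps;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        // e.g. supported {10, 15, 30}, requested 12 -> prints 10
        System.out.println(nearest(Arrays.asList(10, 15, 30), 12));
    }
}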
@@ -18,7 +18,7 @@ import java.util.ArrayList;
import java.util.List;
/**
- * Android cameras detection, using SDK >= 21.
+ * Android cameras detection, using SDK superior or equal to 21.
*/
public class AndroidCameraConfigurationReader21 {
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
......