Commit 22f54d40 authored by Sylvain Berfini

Updated logs + new SDK/API added to Version.java

parent ce4abb79
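
Editor's note: this commit replaces direct android.util.Log calls, which require an explicit tag on every call, with the project-local org.linphone.mediastream.Log wrapper that supplies the tag itself. The wrapper's implementation is not part of this diff; below is a minimal sketch of the API the new call sites assume (the fixed tag value and the Throwable overload are inferred from the call sites in the hunks that follow, not confirmed by the diff):

```java
package org.linphone.mediastream;

// Minimal sketch, not the actual class from the repository: forwards to
// android.util.Log with a fixed tag, matching the call shapes in this diff.
public final class Log {
    private static final String TAG = "mediastreamer"; // assumed tag

    private Log() {}

    public static void d(String msg) { android.util.Log.d(TAG, msg); }
    public static void i(String msg) { android.util.Log.i(TAG, msg); }
    public static void w(String msg) { android.util.Log.w(TAG, msg); }
    public static void e(String msg) { android.util.Log.e(TAG, msg); }

    // One call site below passes an exception: Log.e(message, exc).
    public static void e(String msg, Throwable t) { android.util.Log.e(TAG, msg, t); }
}
```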
@@ -42,13 +42,15 @@ public class Version {
public static final int API12_HONEYCOMB_MR1_31X = 12;
public static final int API13_HONEYCOMB_MR2_32 = 13;
public static final int API14_ICE_CREAM_SANDWICH_40 = 14;
public static final int API15_ICE_CREAM_SANDWICH_403 = 15;
public static final int API16_JELLY_BEAN_41 = 16;
public static final int API17_JELLY_BEAN_42 = 17;
private static native boolean nativeHasZrtp();
private static native boolean nativeHasNeon();
private static Boolean hasNeon;
private static final int buildVersion =
Integer.parseInt(Build.VERSION.SDK);
private static final int buildVersion = Build.VERSION.SDK_INT;
// API03_CUPCAKE_15;
// 8; // 2.2
// 7; // 2.1
@@ -107,6 +109,6 @@ public class Version {
StringBuilder sb = new StringBuilder(" ==== Capabilities dump ====\n");
sb.append("Has neon: ").append(Boolean.toString(hasNeon())).append("\n");
sb.append("Has ZRTP: ").append(Boolean.toString(hasZrtp())).append("\n");
android.util.Log.i("mediastreamer", sb.toString());
Log.i(sb.toString());
}
}
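
Editor's note: two things happen in Version.java above. Constants for API 16 (Jelly Bean 4.1) and API 17 (4.2) are added, and buildVersion switches from parsing the deprecated Build.VERSION.SDK string to reading Build.VERSION.SDK_INT directly. A hedged usage sketch of the new constants; the gating pattern here is illustrative, not taken from the diff:

```java
import android.os.Build;

// Illustrative gate, assuming a direct comparison against SDK_INT.
// Build.VERSION.SDK is a deprecated String ("16"); SDK_INT is a plain int.
if (Build.VERSION.SDK_INT >= Version.API16_JELLY_BEAN_41) {
    // APIs introduced in Android 4.1 are safe to call here
}
```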
@@ -21,18 +21,18 @@ package org.linphone.mediastream.video;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.linphone.mediastream.Log;
import org.linphone.mediastream.video.display.OpenGLESDisplay;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Surface.OutOfResourcesException;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
public class AndroidVideoWindowImpl {
private SurfaceView mVideoRenderingView;
@@ -77,7 +77,7 @@ public class AndroidVideoWindowImpl {
mVideoRenderingView.getHolder().addCallback(new Callback(){
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
Log.i("mediastream", "Video display surface is being changed.");
Log.i("Video display surface is being changed.");
if (!useGLrendering) {
synchronized(AndroidVideoWindowImpl.this){
mBitmap=Bitmap.createBitmap(width,height,Config.RGB_565);
@@ -85,11 +85,11 @@ public class AndroidVideoWindowImpl {
}
}
if (mListener!=null) mListener.onVideoRenderingSurfaceReady(AndroidVideoWindowImpl.this, mVideoRenderingView);
Log.w("mediastream", "Video display surface changed");
Log.w("Video display surface changed");
}
public void surfaceCreated(SurfaceHolder holder) {
Log.w("mediastream", "Video display surface created");
Log.w("Video display surface created");
}
public void surfaceDestroyed(SurfaceHolder holder) {
@@ -101,7 +101,7 @@ public class AndroidVideoWindowImpl {
}
if (mListener!=null)
mListener.onVideoRenderingSurfaceDestroyed(AndroidVideoWindowImpl.this);
Log.d("mediastream", "Video display surface destroyed");
Log.d("Video display surface destroyed");
}
});
// register callback for preview surface events
@@ -109,20 +109,20 @@ public class AndroidVideoWindowImpl {
mVideoPreviewView.getHolder().addCallback(new Callback(){
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
Log.i("mediastream", "Video preview surface is being changed.");
Log.i("Video preview surface is being changed.");
if (mListener!=null)
mListener.onVideoPreviewSurfaceReady(AndroidVideoWindowImpl.this, mVideoPreviewView);
Log.w("mediastream", "Video preview surface changed");
Log.w("Video preview surface changed");
}
public void surfaceCreated(SurfaceHolder holder) {
Log.w("mediastream", "Video preview surface created");
Log.w("Video preview surface created");
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mListener!=null)
mListener.onVideoPreviewSurfaceDestroyed(AndroidVideoWindowImpl.this);
Log.d("mediastream", "Video preview surface destroyed");
Log.d("Video preview surface destroyed");
}
});
}
@@ -143,18 +143,18 @@ public class AndroidVideoWindowImpl {
}
public Surface getSurface(){
if (useGLrendering)
Log.e("mediastream", "View class does not match Video display filter used (you must use a non-GL View)");
Log.e("View class does not match Video display filter used (you must use a non-GL View)");
return mVideoRenderingView.getHolder().getSurface();
}
public Bitmap getBitmap(){
if (useGLrendering)
Log.e("mediastream", "View class does not match Video display filter used (you must use a non-GL View)");
Log.e( "View class does not match Video display filter used (you must use a non-GL View)");
return mBitmap;
}
public void setOpenGLESDisplay(int ptr) {
if (!useGLrendering)
Log.e("mediastream", "View class does not match Video display filter used (you must use a GL View)");
Log.e("View class does not match Video display filter used (you must use a GL View)");
renderer.setOpenGLESDisplay(ptr);
}
......
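
Editor's note: the callbacks registered in the hunks above translate SurfaceHolder lifecycle events into listener notifications. A hedged usage sketch follows; the constructor and setListener signatures are assumed rather than shown in this diff, while the listener method names are taken from the notification calls above:

```java
// Sketch only: wiring rendering and preview SurfaceViews into the window impl.
// Constructor and setListener signatures are assumptions, not from this diff.
void bindVideoWindow(SurfaceView renderingView, SurfaceView previewView) {
    AndroidVideoWindowImpl vw = new AndroidVideoWindowImpl(renderingView, previewView);
    vw.setListener(new AndroidVideoWindowImpl.VideoWindowListener() {
        public void onVideoRenderingSurfaceReady(AndroidVideoWindowImpl w, SurfaceView v) { /* attach display filter */ }
        public void onVideoRenderingSurfaceDestroyed(AndroidVideoWindowImpl w) { /* detach display filter */ }
        public void onVideoPreviewSurfaceReady(AndroidVideoWindowImpl w, SurfaceView v) { /* attach capture filter */ }
        public void onVideoPreviewSurfaceDestroyed(AndroidVideoWindowImpl w) { /* detach capture filter */ }
    });
}
```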
@@ -20,13 +20,13 @@ package org.linphone.mediastream.video.capture;
import java.util.List;
import org.linphone.mediastream.Log;
import org.linphone.mediastream.video.capture.hwconf.AndroidCameraConfiguration;
import org.linphone.mediastream.video.capture.hwconf.AndroidCameraConfiguration.AndroidCamera;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceView;
/**
@@ -41,13 +41,13 @@ public class AndroidVideoApi5JniWrapper {
public static native void putImage(long nativePtr, byte[] buffer);
static public int detectCameras(int[] indexes, int[] frontFacing, int[] orientation) {
Log.d("mediastreamer", "detectCameras\n");
Log.d("detectCameras\n");
AndroidCamera[] cameras = AndroidCameraConfiguration.retrieveCameras();
int nextIndex = 0;
for (AndroidCamera androidCamera : cameras) {
if (nextIndex == 2) {
Log.w("mediastreamer", "Returning only the 2 first cameras (increase buffer size to retrieve all)");
Log.w("Returning only the 2 first cameras (increase buffer size to retrieve all)");
break;
}
// skip already added cameras
......
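
Editor's note: detectCameras fills the caller-supplied parallel arrays and, per the warning logged above, stops after two cameras when the buffers are that small. A hedged caller sketch; the encoding of the frontFacing values is an assumption:

```java
// Sketch: buffers sized to 2, matching the cap mentioned in the warning above.
void listCameras() {
    int[] ids = new int[2];
    int[] frontFacing = new int[2]; // assumed: non-zero means front-facing
    int[] orientation = new int[2];
    int count = AndroidVideoApi5JniWrapper.detectCameras(ids, frontFacing, orientation);
    for (int i = 0; i < count; i++) {
        Log.d("camera id=" + ids[i] + " front=" + frontFacing[i]
                + " orientation=" + orientation[i]);
    }
}
```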
@@ -18,9 +18,10 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package org.linphone.mediastream.video.capture;
import org.linphone.mediastream.Log;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.util.Log;
public class AndroidVideoApi8JniWrapper {
static public int detectCameras(int[] indexes, int[] frontFacing, int[] orientation) {
@@ -32,7 +33,7 @@ public class AndroidVideoApi8JniWrapper {
}
public static Object startRecording(int cameraId, int width, int height, int fps, int rotation, final long nativePtr) {
Log.d("mediastreamer", "startRecording(" + cameraId + ", " + width + ", " + height + ", " + fps + ", " + rotation + ", " + nativePtr + ")");
Log.d("startRecording(" + cameraId + ", " + width + ", " + height + ", " + fps + ", " + rotation + ", " + nativePtr + ")");
Camera camera = Camera.open();
AndroidVideoApi5JniWrapper.applyCameraParameters(camera, width, height, fps);
@@ -53,13 +54,13 @@ public class AndroidVideoApi8JniWrapper {
camera.startPreview();
AndroidVideoApi5JniWrapper.isRecording = true;
Log.d("mediastreamer", "Returning camera object: " + camera);
Log.d("Returning camera object: " + camera);
return camera;
}
public static void stopRecording(Object cam) {
AndroidVideoApi5JniWrapper.isRecording = false;
Log.d("mediastreamer", "stopRecording(" + cam + ")");
Log.d("stopRecording(" + cam + ")");
Camera camera = (Camera) cam;
if (camera != null) {
@@ -67,7 +68,7 @@ public class AndroidVideoApi8JniWrapper {
camera.stopPreview();
camera.release();
} else {
Log.i("mediastreamer", "Cannot stop recording ('camera' is null)");
Log.i("Cannot stop recording ('camera' is null)");
}
}
......
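
Editor's note: startRecording opens the camera, applies the requested parameters, starts the preview, and returns the Camera instance as an opaque Object; the caller must hand the same Object back to stopRecording, which tolerates null by logging instead of throwing. A hedged lifecycle sketch; nativePtr would come from the native capture filter and is a placeholder here:

```java
// Sketch: start/stop lifecycle around the Api8 wrapper.
void captureOnce(long nativePtr) {
    Object cam = AndroidVideoApi8JniWrapper.startRecording(
            0 /* cameraId */, 640, 480, 15 /* fps */, 90 /* rotation */, nativePtr);
    // ... frames flow through the preview callback while recording ...
    AndroidVideoApi8JniWrapper.stopRecording(cam); // null-tolerant: logs instead of throwing
}
```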
@@ -20,13 +20,14 @@ package org.linphone.mediastream.video.capture;
import java.util.List;
import org.linphone.mediastream.Log;
import android.annotation.TargetApi;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.os.Build;
import android.util.Log;
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
public class AndroidVideoApi9JniWrapper {
@@ -46,12 +47,12 @@ public class AndroidVideoApi9JniWrapper {
* resolution can possibly match the requested one
*/
static public int[] selectNearestResolutionAvailable(int cameraId, int requestedW, int requestedH) {
Log.d("mediastreamer", "selectNearestResolutionAvailable: " + cameraId + ", " + requestedW + "x" + requestedH);
Log.d("selectNearestResolutionAvailable: " + cameraId + ", " + requestedW + "x" + requestedH);
return AndroidVideoApi5JniWrapper.selectNearestResolutionAvailableForCamera(cameraId, requestedW, requestedH);
}
public static Object startRecording(int cameraId, int width, int height, int fps, int rotation, final long nativePtr) {
Log.d("mediastreamer", "startRecording(" + cameraId + ", " + width + ", " + height + ", " + fps + ", " + rotation + ", " + nativePtr + ")");
Log.d("startRecording(" + cameraId + ", " + width + ", " + height + ", " + fps + ", " + rotation + ", " + nativePtr + ")");
try {
Camera camera = Camera.open(cameraId);
Parameters params = camera.getParameters();
@@ -85,7 +86,7 @@ public class AndroidVideoApi9JniWrapper {
setCameraDisplayOrientation(rotation, cameraId, camera);
camera.startPreview();
AndroidVideoApi5JniWrapper.isRecording = true;
Log.d("mediastreamer", "Returning camera object: " + camera);
Log.d("Returning camera object: " + camera);
return camera;
} catch (Exception exc) {
exc.printStackTrace();
@@ -114,17 +115,17 @@ public class AndroidVideoApi9JniWrapper {
result = (info.orientation - rotationDegrees + 360) % 360;
}
Log.w("mediastreamer", "Camera preview orientation: "+ result);
Log.w("Camera preview orientation: "+ result);
try {
camera.setDisplayOrientation(result);
} catch (Exception exc) {
Log.e("mediastreamer", "Failed to execute: camera[" + camera + "].setDisplayOrientation(" + result + ")");
Log.e("Failed to execute: camera[" + camera + "].setDisplayOrientation(" + result + ")");
exc.printStackTrace();
}
}
private static int[] findClosestEnclosingFpsRange(int expectedFps, List<int[]> fpsRanges) {
Log.d("mediastreamer", "Searching for closest fps range from " + expectedFps);
Log.d("Searching for closest fps range from " + expectedFps);
// init with first element
int[] closestRange = fpsRanges.get(0);
int measure = Math.abs(closestRange[0] - expectedFps)
@@ -136,10 +137,10 @@ public class AndroidVideoApi9JniWrapper {
if (curMeasure < measure) {
closestRange=curRange;
measure = curMeasure;
Log.d("mediastreamer", "a better range has been found: w="+closestRange[0]+",h="+closestRange[1]);
Log.d("a better range has been found: w="+closestRange[0]+",h="+closestRange[1]);
}
}
Log.d("mediastreamer", "The closest fps range is w="+closestRange[0]+",h="+closestRange[1]);
Log.d("The closest fps range is w="+closestRange[0]+",h="+closestRange[1]);
return closestRange;
}
}
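
Editor's note: findClosestEnclosingFpsRange above scores each candidate [min, max] range by |min − expected| + |max − expected| and keeps the smallest score. A worked example, under the assumption that the caller passes fps in Android's milli-fps units (so 15 fps is 15000):

```java
import java.util.Arrays;
import java.util.List;

// score(range) = |range[0] - expected| + |range[1] - expected|; smallest wins.
List<int[]> ranges = Arrays.asList(
        new int[]{ 7500, 15000},  // score for 15000:  7500 +     0 =  7500  <- chosen
        new int[]{15000, 30000},  // score for 15000:     0 + 15000 = 15000
        new int[]{30000, 30000}); // score for 15000: 15000 + 15000 = 30000
// findClosestEnclosingFpsRange(15000, ranges) would return {7500, 15000}.
```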
@@ -20,10 +20,10 @@ package org.linphone.mediastream.video.capture.hwconf;
import java.util.List;
import org.linphone.mediastream.Log;
import org.linphone.mediastream.Version;
import android.hardware.Camera.Size;
import android.util.Log;
@@ -65,7 +65,7 @@ public class AndroidCameraConfiguration {
else
camerasCache = AndroidCameraConfiguration.probeCamerasSDK9();
} catch (Exception exc) {
Log.e("mediastreamer", "Error: cannot retrieve cameras information (busy ?)", exc);
Log.e("Error: cannot retrieve cameras information (busy ?)", exc);
exc.printStackTrace();
camerasCache = new AndroidCamera[0];
}
......
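
Editor's note: retrieveCameras probes once (taking the SDK 5 or SDK 9 path depending on the running version), caches the result in camerasCache, and degrades to an empty array if probing throws. A hedged caller sketch:

```java
// Sketch: probing is cached, so repeated calls are cheap; a probe failure
// yields an empty array rather than a propagated exception.
AndroidCamera[] cameras = AndroidCameraConfiguration.retrieveCameras();
Log.d("found " + cameras.length + " camera(s)");
```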
@@ -21,11 +21,11 @@ package org.linphone.mediastream.video.capture.hwconf;
import java.util.ArrayList;
import java.util.List;
import org.linphone.mediastream.Log;
import org.linphone.mediastream.video.capture.hwconf.AndroidCameraConfiguration.AndroidCamera;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
/**
* Android cameras detection, using SDK < 9
@@ -41,20 +41,20 @@ class AndroidCameraConfigurationReader5 {
// Defaults
if (Hacks.isGalaxySOrTab()) {
Log.d("mediastreamer", "Hack Galaxy S : has one or more cameras");
Log.d( "Hack Galaxy S : has one or more cameras");
if (Hacks.isGalaxySOrTabWithFrontCamera()) {
Log.d("mediastreamer", "Hack Galaxy S : HAS a front camera with id=2");
Log.d("Hack Galaxy S : HAS a front camera with id=2");
cam.add(new AndroidCamera(2, true, 90, r));
} else {
Log.d("mediastreamer", "Hack Galaxy S : NO front camera");
Log.d("Hack Galaxy S : NO front camera");
}
Log.d("mediastreamer", "Hack Galaxy S : HAS a rear camera with id=1");
Log.d("Hack Galaxy S : HAS a rear camera with id=1");
cam.add(new AndroidCamera(1, false, 90, r));
} else {
cam.add(new AndroidCamera(0, false, 90, r));
if (Hacks.hasTwoCamerasRear0Front1()) {
Log.d("mediastreamer", "Hack SPHD700 has 2 cameras a rear with id=0 and a front with id=1");
Log.d("Hack SPHD700 has 2 cameras a rear with id=0 and a front with id=1");
cam.add(new AndroidCamera(1, true, 90, r));
}
}
......
@@ -18,11 +18,11 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package org.linphone.mediastream.video.capture.hwconf;
import org.linphone.mediastream.Log;
import org.linphone.mediastream.Version;
import android.hardware.Camera;
import android.os.Build;
import android.util.Log;
public final class Hacks {
@@ -106,12 +106,12 @@ public final class Hacks {
try {
nb = (Integer) Camera.class.getMethod("getNumberOfCameras", (Class[])null).invoke(null);
} catch (Exception e) {
Log.e("mediastreamer", "Error getting number of cameras");
Log.e("Error getting number of cameras");
}
return nb > 0;
}
Log.i("mediastreamer", "Hack: considering there IS a camera.\n"
Log.i("Hack: considering there IS a camera.\n"
+ "If it is not the case, report DEVICE and MODEL to linphone-users@nongnu.org");
return true;
}
@@ -119,11 +119,11 @@ public final class Hacks {
public static boolean hasBuiltInEchoCanceller() {
for (BuiltInEchoCancellerModel model: mBuiltInEchoCancellerModels) {
if (Build.MANUFACTURER.equals(model.manufacturer) && Build.MODEL.startsWith(model.model)) {
Log.i("mediastreamer", Build.MANUFACTURER + " " + Build.MODEL + " has a built-in echo canceller");
Log.i(Build.MANUFACTURER + " " + Build.MODEL + " has a built-in echo canceller");
return true;
}
}
Log.i("mediastreamer", Build.MANUFACTURER + " " + Build.MODEL + " doesn't have a built-in echo canceller");
Log.i(Build.MANUFACTURER + " " + Build.MODEL + " doesn't have a built-in echo canceller");
return false;
}
}
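
Editor's note: hasBuiltInEchoCanceller above walks a static table and matches Build.MANUFACTURER with equals and Build.MODEL as a startsWith prefix. A hedged sketch of what one table entry might look like; the fields are inferred from the matching loop, and the device entry is hypothetical:

```java
// Sketch: fields and matching rule inferred from the loop above; the
// example device entry is hypothetical, not from the repository.
private static class BuiltInEchoCancellerModel {
    final String manufacturer;
    final String model; // matched as a prefix via Build.MODEL.startsWith(model)
    BuiltInEchoCancellerModel(String manufacturer, String model) {
        this.manufacturer = manufacturer;
        this.model = model;
    }
}

private static final BuiltInEchoCancellerModel[] mBuiltInEchoCancellerModels = {
    new BuiltInEchoCancellerModel("samsung", "GT-I9100"), // hypothetical entry
};
```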
@@ -23,14 +23,14 @@ import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import org.linphone.mediastream.Log;
import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
class GL2JNIView extends GLSurfaceView {
private static String TAG = "GL2JNIView";
private static final boolean DEBUG = false;
public GL2JNIView(Context context) {
@@ -77,7 +77,7 @@ class GL2JNIView extends GLSurfaceView {
private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
Log.w(TAG, "creating OpenGL ES 2.0 context");
Log.w("creating OpenGL ES 2.0 context");
checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
@@ -93,7 +93,7 @@ class GL2JNIView extends GLSurfaceView {
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
Log.e(String.format("%s: EGL error: 0x%x", prompt, error));
}
}
@@ -188,9 +188,9 @@ class GL2JNIView extends GLSurfaceView {
private void printConfigs(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
int numConfigs = configs.length;
Log.w(TAG, String.format("%d configurations", numConfigs));
Log.w(String.format("%d configurations", numConfigs));
for (int i = 0; i < numConfigs; i++) {
Log.w(TAG, String.format("Configuration %d:\n", i));
Log.w(String.format("Configuration %d:\n", i));
printConfig(egl, display, configs[i]);
}
}
@@ -272,7 +272,7 @@ class GL2JNIView extends GLSurfaceView {
int attribute = attributes[i];
String name = names[i];
if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
Log.w(TAG, String.format(" %s: %d\n", name, value[0]));
Log.w(String.format(" %s: %d\n", name, value[0]));
} else {
// Log.w(TAG, String.format(" %s: failed\n", name));
while (egl.eglGetError() != EGL10.EGL_SUCCESS);
......