android: rewrite of Android mediastreamer integration

parent a82dcdbf
......@@ -28,3 +28,6 @@ missing
stamp-h1
*.gmo
java/bin/
java/libs/
java/gen/
......@@ -76,10 +76,7 @@ LOCAL_SRC_FILES = \
tonedetector.c \
audiostream.c \
qualityindicator.c \
bitratecontrol.c \
shaders.c \
opengles_display.c \
android-opengl-display.c
bitratecontrol.c
ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
LOCAL_SRC_FILES += msresample.c.neon
......@@ -110,8 +107,8 @@ endif
#LOCAL_SRC_FILES += aqsnd.c
ifeq ($(LINPHONE_VIDEO),1)
LOCAL_CFLAGS += -DVIDEO_ENABLED
LOCAL_ARM_NEON := true
LOCAL_CFLAGS += -DVIDEO_ENABLED -DHAVE_NEON=1 -D__ARM_NEON__
LOCAL_SRC_FILES += \
videoenc.c \
......@@ -130,7 +127,10 @@ LOCAL_SRC_FILES += \
msandroidvideo.cpp \
scaler.c.neon \
scaler_arm.S.neon \
vp8.c
vp8.c \
shaders.c \
opengles_display.c \
android-opengl-display.c
endif
#LOCAL_SRC_FILES += videostream.c
......@@ -176,6 +176,25 @@ endif
LOCAL_STATIC_LIBRARIES += cpufeatures
include $(BUILD_STATIC_LIBRARY)
ifeq ($(BUILD_MS2), 1)
LOCAL_SRC_FILES += \
../tests/mediastream.c
LOCAL_STATIC_LIBRARIES += \
libgsm
ifeq ($(LINPHONE_VIDEO),1)
LOCAL_STATIC_LIBRARIES += \
libgsm \
libvpx \
libavcodec \
libswscale \
libavcore \
libavutil
endif
LOCAL_LDLIBS += -lGLESv2 -llog -ldl
include $(BUILD_SHARED_LIBRARY)
else
include $(BUILD_STATIC_LIBRARY)
endif
$(call import-module,android/cpufeatures)
......@@ -147,7 +147,7 @@ MS2_PUBLIC void audio_stream_enable_adaptive_bitrate_control(AudioStream *st, bo
MS2_PUBLIC void audio_stream_set_mic_gain(AudioStream *stream, float gain);
/* enable/disable rtp stream */
/* enable/disable rtp stream */
MS2_PUBLIC void audio_stream_mute_rtp(AudioStream *stream, bool_t val);
/*enable noise gate, must be done before start()*/
......@@ -229,6 +229,7 @@ struct _VideoStream
MSWebCam *cam;
bool_t use_preview_window;
bool_t adapt_bitrate;
int device_orientation; /* warning: meaning of this variable depends on the platform (Android, iOS, ...) */
OrtpZrtpContext *ortpZrtpContext;
};
......@@ -263,11 +264,12 @@ MS2_PUBLIC void video_stream_set_native_window_id(VideoStream *stream, unsigned
MS2_PUBLIC void video_stream_set_native_preview_window_id(VideoStream *stream, unsigned long id);
MS2_PUBLIC unsigned long video_stream_get_native_preview_window_id(VideoStream *stream);
MS2_PUBLIC void video_stream_use_preview_video_window(VideoStream *stream, bool_t yesno);
MS2_PUBLIC void video_stream_set_device_rotation(VideoStream *stream, int orientation);
/*provided for compatibility, use video_stream_set_direction() instead */
MS2_PUBLIC int video_stream_recv_only_start(VideoStream *videostream, RtpProfile *profile, const char *addr, int port, int used_pt, int jitt_comp);
MS2_PUBLIC int video_stream_send_only_start(VideoStream *videostream,
RtpProfile *profile, const char *addr, int port, int rtcp_port,
RtpProfile *profile, const char *addr, int port, int rtcp_port,
int used_pt, int jitt_comp, MSWebCam *device);
MS2_PUBLIC void video_stream_recv_only_stop(VideoStream *vs);
MS2_PUBLIC void video_stream_send_only_stop(VideoStream *vs);
......
......@@ -102,7 +102,7 @@ struct _MSFilterStats{
unsigned int count; /*<number of time the filter is called for processing*/
};
typedef struct _MSFilterStats MSFilterStats;
typedef struct _MSFilterStats MSFilterStats;
struct _MSFilterDesc{
MSFilterId id; /* the id declared in allfilters.h */
......@@ -231,7 +231,7 @@ MS2_PUBLIC MSFilterDesc * ms_filter_get_decoder(const char *mime);
* @param name The filter name.
**/
MS2_PUBLIC MSFilterDesc *ms_filter_lookup_by_name(const char *filter_name);
/**
* Create encoder filter according to codec name.
*
......@@ -297,7 +297,7 @@ MS2_PUBLIC MSFilter *ms_filter_new_from_name(const char *name);
*
* The primary use is to create your own filter's in your
* application and avoid registration inside mediastreamer2.
*
*
* @param desc A MSFilterDesc for the filter.
*
* Returns: a MSFilter if successfull, NULL otherwise.
......@@ -402,10 +402,10 @@ MS2_PUBLIC void ms_connection_helper_start(MSConnectionHelper *h);
/**
* \brief Enter a MSFilter to be connected into the MSConnectionHelper object.
*
*
* This functions enters a MSFilter to be connected into the MSConnectionHelper
* object and connects it to the last entered if not the first one.
* The MSConnectionHelper is useful to reduce the amount of code necessary to create graphs in case
* The MSConnectionHelper is useful to reduce the amount of code necessary to create graphs in case
* the connections are made in an ordered manner and some filters are present conditionally in graphs.
* For example, instead of writing
* \code
......@@ -429,11 +429,11 @@ MS2_PUBLIC void ms_connection_helper_start(MSConnectionHelper *h);
* if (my_condition) ms_connection_helper_link(&h,f2,1,0);
* \endcode
*
* @param h a connection helper
* @param h a connection helper
* @param f a MSFilter
* @param inpin an input pin number with which the MSFilter needs to connect to previously entered MSFilter
* @param outpin an output pin number with which the MSFilter needs to be connected to the next entered MSFilter
*
*
* Returns: the return value of ms_filter_link() that is called internally to this function.
**/
MS2_PUBLIC int ms_connection_helper_link(MSConnectionHelper *h, MSFilter *f, int inpin, int outpin);
......@@ -441,7 +441,7 @@ MS2_PUBLIC int ms_connection_helper_link(MSConnectionHelper *h, MSFilter *f, int
/**
* \brief Enter a MSFilter to be disconnected into the MSConnectionHelper object.
* Process exactly the same way as ms_connection_helper_link() but calls ms_filter_unlink() on the
* Process exactly the same way as ms_connection_helper_link() but calls ms_filter_unlink() on the
* entered filters.
**/
MS2_PUBLIC int ms_connection_helper_unlink(MSConnectionHelper *h, MSFilter *f, int inpin, int outpin);
......@@ -459,7 +459,7 @@ MS2_PUBLIC void ms_filter_enable_statistics(bool_t enabled);
*
**/
MS2_PUBLIC void ms_filter_reset_statistics(void);
/**
* \brief Retrieves statistics for running filters.
* Returns a list of MSFilterStats
......@@ -468,7 +468,7 @@ MS2_PUBLIC const MSList * ms_filter_get_statistics(void);
/**
* \brief Logs runtime statistics for running filters.
*
*
**/
MS2_PUBLIC void ms_filter_log_statistics(void);
......@@ -519,7 +519,8 @@ enum _MSFilterInterfaceId{
MSFilterRecorderInterface,
MSFilterVideoDisplayInterface,
MSFilterEchoCancellerInterface,
MSFilterVideoDecoderInterface
MSFilterVideoDecoderInterface,
MSFilterVideoCaptureInterface,
};
typedef enum _MSFilterInterfaceId MSFilterInterfaceId;
......
......@@ -120,5 +120,9 @@ typedef enum _MSPlayerState MSPlayerState;
/** Interface definitions for video decoders */
#define MS_VIDEO_DECODER_DECODING_ERRORS \
MS_FILTER_EVENT_NO_ARG(MSFilterVideoDecoderInterface,0)
#endif
/** Interface definitions for video capture */
#define MS_VIDEO_CAPTURE_SET_DEVICE_ORIENTATION \
MS_FILTER_METHOD(MSFilterVideoCaptureInterface,0,int)
#endif
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="output" path="bin"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Mediastreamer</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.linphone"
android:versionCode="1"
android:versionName="1.0">
<uses-sdk android:minSdkVersion="3" />
<application android:icon="@drawable/icon" android:label="@string/app_name">
<activity android:name=".MediastreamerActivity"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-permission android:name="android.permission.INTERNET"></uses-permission>
<uses-permission android:name="android.permission.RECORD_AUDIO"></uses-permission>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.WAKE_LOCK"/>
<uses-permission android:name="android.permission.PROCESS_OUTGOING_CALLS"></uses-permission>
<uses-permission android:name="android.permission.CALL_PHONE"></uses-permission>
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED"></uses-permission>
<uses-permission android:name="android.permission.VIBRATE"></uses-permission>
<uses-permission android:name="android.permission.CAMERA" />
</manifest>
\ No newline at end of file
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-13
-optimizationpasses 5
-dontusemixedcaseclassnames
-dontskipnonpubliclibraryclasses
-dontpreverify
-verbose
-optimizations !code/simplification/arithmetic,!field/*,!class/merging/*
-keep public class * extends android.app.Activity
-keep public class * extends android.app.Application
-keep public class * extends android.app.Service
-keep public class * extends android.content.BroadcastReceiver
-keep public class * extends android.content.ContentProvider
-keep public class * extends android.app.backup.BackupAgentHelper
-keep public class * extends android.preference.Preference
-keep public class com.android.vending.licensing.ILicensingService
-keepclasseswithmembernames class * {
native <methods>;
}
-keepclasseswithmembers class * {
public <init>(android.content.Context, android.util.AttributeSet);
}
-keepclasseswithmembers class * {
public <init>(android.content.Context, android.util.AttributeSet, int);
}
-keepclassmembers class * extends android.app.Activity {
public void *(android.view.View);
}
-keepclassmembers enum * {
public static **[] values();
public static ** valueOf(java.lang.String);
}
-keep class * implements android.os.Parcelable {
public static final android.os.Parcelable$Creator *;
}
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/video_frame" android:orientation="vertical"
android:layout_height="fill_parent" android:layout_width="fill_parent">
<org.linphone.mediastream.GL2JNIView android:layout_height="fill_parent" android:layout_width="fill_parent" android:id="@+id/video_surface"></org.linphone.mediastream.GL2JNIView >
<SurfaceView android:layout_height="72dip" android:layout_width="88dip" android:id="@+id/video_capture_surface" android:layout_gravity="right|bottom"
android:layout_margin="15dip"></SurfaceView>
</FrameLayout>
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/video_frame"
android:layout_height="fill_parent" android:layout_width="fill_parent">
<org.linphone.mediastream.GL2JNIView
android:layout_height="fill_parent"
android:layout_width="fill_parent"
android:id="@+id/video_surface">
</org.linphone.mediastream.GL2JNIView>
<SurfaceView
android:layout_height="88dip"
android:layout_width="72dip"
android:id="@+id/video_capture_surface"
android:layout_gravity="right|bottom"
android:layout_margin="15dip">
</SurfaceView>
</FrameLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<menu
xmlns:android="http://schemas.android.com/apk/res/android">
<item android:title="Change camera" android:id="@+id/videocall_menu_change_camera"></item>
<item android:title="Exit" android:id="@+id/videocall_menu_exit"></item>
</menu>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="hello">Hello World, MediastreamerActivity!</string>
<string name="app_name">Mediastreamer</string>
</resources>
package org.linphone;
import java.util.ArrayList;
import java.util.List;
import org.linphone.core.AndroidVideoWindowImpl;
import android.app.Activity;
import android.hardware.Camera;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Demo activity driving the native mediastreamer2 stack over JNI.
 *
 * It starts an audio/video stream on a background thread, hands the Android
 * capture (SurfaceView) and rendering (GLSurfaceView) surfaces to native code,
 * and forwards the current display rotation to the native layer whenever the
 * accelerometer fires.
 */
public class MediastreamerActivity extends Activity implements
		SensorEventListener {

	/* Native entry points implemented by libmediastreamer2 (JNI). */
	native int runMediaStream(int argc, String[] argv); // blocking: runs the stream loop until stopped
	native int stopMediaStream();
	native void setVideoWindowId(Object wid);           // remote-video rendering target
	native void setVideoPreviewWindowId(Object wid);    // local camera preview target
	native void setDeviceRotation(int rotation);        // rotation in degrees from natural orientation
	native void changeCamera(int newCameraId);

	Thread msThread;  // runs runMediaStream(); joined (bounded) in onDestroy()
	int cameraId;     // index of the currently selected camera, cycled via the menu

	/**
	 * Loads a native library, logging instead of failing so the app still runs
	 * when an optional codec library is absent from the APK.
	 *
	 * NOTE(review): the first argument of android.util.Log.w() is the TAG, so
	 * this call logs the fixed text as tag and the library name as message —
	 * the arguments look swapped; confirm intent.
	 */
	private static void loadOptionalLibrary(String s) {
		try {
			System.loadLibrary(s);
		} catch (Throwable e) {
			Log.w("Unable to load optional library lib", s);
		}
	}

	static {
		// FFMPEG (audio/video)
		loadOptionalLibrary("avutil");
		loadOptionalLibrary("swscale");
		loadOptionalLibrary("avcore");
		loadOptionalLibrary("avcodec");
		// Main library
		System.loadLibrary("mediastreamer2");
	}

	/** Inflates the video-call menu; hides camera switching on single-camera devices. */
	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the currently selected menu XML resource.
		MenuInflater inflater = getMenuInflater();
		inflater.inflate(R.menu.videocall_activity_menu, menu);

		if (Camera.getNumberOfCameras() == 1) {
			menu.findItem(R.id.videocall_menu_change_camera).setVisible(false);
		}
		return true;
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		switch (item.getItemId()) {
		case R.id.videocall_menu_exit:
			this.finish();
			break;
		case R.id.videocall_menu_change_camera:
			// Cycle through available cameras, then re-attach the preview
			// surface so the native side binds it to the new camera.
			cameraId = (cameraId + 1) % Camera.getNumberOfCameras();
			changeCamera(cameraId);
			setVideoPreviewWindowId(findViewById(R.id.video_capture_surface));
			break;
		}
		return true;
	}

	/** Called when the activity is first created. */
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		/* declare layout */
		setContentView(R.layout.main);

		cameraId = 0;

		Log.i("ms", "Mediastreamer starting !");

		/* retrieve preview surface */
		final SurfaceView previewSurface = (SurfaceView) findViewById(R.id.video_capture_surface);
		final SurfaceHolder holder = previewSurface.getHolder();
		holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

		/* retrieve rendering surface */
		GLSurfaceView view = (GLSurfaceView) findViewById(R.id.video_surface);

		/* force surfaces Z ordering */
		view.setZOrderOnTop(false);
		previewSurface.setZOrderOnTop(true);

		/* register callback, allowing us to use preview surface when ready */
		holder.addCallback(new SurfaceHolder.Callback() {
			@Override
			public void surfaceDestroyed(SurfaceHolder holder) {
				// Intentionally empty: native side keeps its own reference.
			}

			@Override
			public void surfaceCreated(SurfaceHolder holder) {
				// Hand the capture preview surface to the native filter.
				setVideoPreviewWindowId(previewSurface);
			}

			@Override
			public void surfaceChanged(SurfaceHolder holder, int format,
					int width, int height) {
				// ...
			}
		});

		/* instanciate object responsible of video rendering */
		AndroidVideoWindowImpl mVideoWindow = new AndroidVideoWindowImpl(view);
		mVideoWindow
				.setListener(new AndroidVideoWindowImpl.VideoWindowListener() {
					public void onSurfaceDestroyed(AndroidVideoWindowImpl vw) {
						// setVideoWindowId(null);
					}

					public void onSurfaceReady(AndroidVideoWindowImpl vw) {
						setVideoWindowId(vw);
						// set device rotation too
						onSensorChanged(null);
					}
				});

		// Command-line-style arguments forwarded to the native mediastream
		// test program: a loopback call to 127.0.0.1:4000 using payload 103,
		// capturing from the first Android camera.
		final List<String> args = new ArrayList<String>();
		args.add("prog_name");
		args.add("--local");
		args.add("4000");
		args.add("--remote");
		args.add("127.0.0.1:4000");
		args.add("--payload");
		args.add("103");
		args.add("--camera");
		args.add("Android0");

		// if the phone is vertical => supply portrait mode resolution
		int rot = rotationToAngle(getWindowManager().getDefaultDisplay()
				.getRotation());
		if (rot % 180 == 0) {
			args.add("--width");
			args.add("240");
			args.add("--height");
			args.add("320");
		}

		// Run the (blocking) native stream loop off the UI thread.
		msThread = new Thread() {
			public void run() {
				Log.e("ms", "Starting mediastream !");
				String[] _args = new String[args.size()];
				int ret = runMediaStream(args.size(), args.toArray(_args));
				Log.e("ms", "Mediastreamer ended (return code:" + ret + ")");
			};
		};

		/* start mediastream */
		msThread.start();
	}

	@Override
	protected void onResume() {
		super.onResume();
		// Listen to the accelerometer only while in the foreground; used to
		// detect rotation changes (see onSensorChanged).
		SensorManager mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
		Sensor mAccelerometer = mSensorManager
				.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
		mSensorManager.registerListener(this, mAccelerometer,
				SensorManager.SENSOR_DELAY_NORMAL);
	}

	@Override
	protected void onPause() {
		super.onPause();
		((SensorManager) getSystemService(SENSOR_SERVICE))
				.unregisterListener(this);
	}

	@Override
	public void onAccuracyChanged(Sensor sensor, int accuracy) {
		// Accuracy changes are irrelevant here.
	}

	/**
	 * Ignores the sensor event itself and re-reads the display rotation,
	 * forwarding it to the native side. Also invoked directly with a null
	 * event from the surface-ready callback in onCreate().
	 */
	@Override
	public void onSensorChanged(SensorEvent event) {
		int rot = rotationToAngle(getWindowManager().getDefaultDisplay()
				.getRotation());
		// Returning rotation FROM ITS NATURAL ORIENTATION
		setDeviceRotation(rot);
	}

	@Override
	protected void onDestroy() {
		// Ask the native loop to exit, then wait (up to 100 s) for the
		// streaming thread to finish; interruption/timeouts are ignored.
		stopMediaStream();
		try {
			msThread.join(100000);
		} catch (Exception exc) {
		}
		Log.d("ms", "MediastreamerActivity destroyed");
		super.onDestroy();
	}

	/** Maps a Surface.ROTATION_* constant to degrees (0/90/180/270). */
	static int rotationToAngle(int r) {
		switch (r) {
		case Surface.ROTATION_0:
			return 0;
		case Surface.ROTATION_90:
			return 90;
		case Surface.ROTATION_180:
			return 180;
		case Surface.ROTATION_270:
			return 270;
		}
		return 0;
	}
}
\ No newline at end of file
package org.linphone;

/**
 * JNI bridge to the native OpenGL ES display filter (opengles_display.c).
 *
 * NOTE(review): the native context is addressed through a raw pointer carried
 * in a Java {@code int}, which is not 64-bit safe — confirm target ABIs.
 */
public class OpenGLESDisplay {
	/** Initializes the native display context for the given viewport size. */
	public static native void init(int ptr, int width, int height);

	/** Renders one frame from the native display context. */
	public static native void render(int ptr);
}
package org.linphone.core;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.linphone.OpenGLESDisplay;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Bitmap.Config;
import android.opengl.GLSurfaceView;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Surface.OutOfResourcesException;
import android.view.SurfaceHolder.Callback;
public class AndroidVideoWindowImpl {
private boolean useGLrendering;
private Bitmap mBitmap;
private SurfaceView mView;
private Surface mSurface;
private VideoWindowListener mListener;
private Renderer renderer;
public static interface VideoWindowListener{
void onSurfaceReady(AndroidVideoWindowImpl vw);
void onSurfaceDestroyed(AndroidVideoWindowImpl vw);
};
	/**
	 * Wraps an Android SurfaceView so native video display filters can draw
	 * into it. The rendering mode is chosen from the concrete view type:
	 * GL rendering when the view is a GLSurfaceView (a Renderer is installed
	 * in RENDERMODE_WHEN_DIRTY), otherwise bitmap blitting (an RGB_565 Bitmap
	 * is (re)allocated on every surface change and drawn via update()).
	 *
	 * NOTE(review): the single-argument Log.i/w/d calls below are not
	 * android.util.Log; presumably a project logging helper — confirm import.
	 */
	public AndroidVideoWindowImpl(SurfaceView view){
		useGLrendering = (view instanceof GLSurfaceView);
		mView=view;
		mBitmap=null;
		mSurface=null;
		mListener=null;
		view.getHolder().addCallback(new Callback(){
			public void surfaceChanged(SurfaceHolder holder, int format,
					int width, int height) {
				Log.i("Surface is being changed.");
				if (!useGLrendering) {
					// Guard bitmap/surface against concurrent access from the
					// native update() path.
					synchronized(AndroidVideoWindowImpl.this){
						mBitmap=Bitmap.createBitmap(width,height,Config.RGB_565);
						mSurface=holder.getSurface();
					}
				}
				// Notify once the drawing target is usable.
				if (mListener!=null) mListener.onSurfaceReady(AndroidVideoWindowImpl.this);
				Log.w("Video display surface changed");
			}

			public void surfaceCreated(SurfaceHolder holder) {
				// Nothing to do yet: surfaceChanged() follows with the size.
				Log.w("Video display surface created");
			}

			public void surfaceDestroyed(SurfaceHolder holder) {
				if (!useGLrendering) {
					// Drop references so update() stops drawing.
					synchronized(AndroidVideoWindowImpl.this){
						mSurface=null;
						mBitmap=null;
					}
				}
				if (mListener!=null)
					mListener.onSurfaceDestroyed(AndroidVideoWindowImpl.this);
				Log.d("Video display surface destroyed");
			}
		});
		if (useGLrendering) {
			renderer = new Renderer();
			((GLSurfaceView)mView).setRenderer(renderer);
			// Only redraw when requestRender() is called by the native filter.
			((GLSurfaceView)mView).setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
		}
	}
public void setListener(VideoWindowListener l){
mListener=l;
}
public Surface getSurface(){
if (useGLrendering)
Log.e("View class does not match Video display filter used (you must use a non-GL View)");
return mView.getHolder().getSurface();
}
public Bitmap getBitmap(){
if (useGLrendering)
Log.e("View class does not match Video display filter used (you must use a non-GL View)");
return mBitmap;
}
public void setOpenGLESDisplay(int ptr) {
if (!useGLrendering)
Log.e("View class does not match Video display filter used (you must use a GL View)");
renderer.setOpenGLESDisplay(ptr);
}
public void requestRender() {
((GLSurfaceView)mView).requestRender();
}
//Called by the mediastreamer2 android display filter
public synchronized void update(){
if (mSurface!=null){
try {
Canvas canvas=mSurface.lockCanvas(null);
canvas.drawBitmap(mBitmap, 0, 0, null);
mSurface.unlockCanvasAndPost(canvas);
} catch (IllegalArgumentException e) {