Commit 0be2e0ed authored by Sylvain Berfini's avatar Sylvain Berfini 🎩

Removed display filter in video preview graph for Android + improved...

Removed display filter in video preview graph for Android + improved AndroidVideoWindowImpl to make display surface optional
parent bc1180d7
......@@ -87,36 +87,39 @@ public class AndroidVideoWindowImpl {
*/
public void init() {
// register callback for rendering surface events
mVideoRenderingView.getHolder().addCallback(new Callback(){
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
Log.i("Video display surface is being changed.");
if (!useGLrendering) {
synchronized(AndroidVideoWindowImpl.this){
mBitmap=Bitmap.createBitmap(width,height,Config.RGB_565);
mSurface=holder.getSurface();
if (mVideoRenderingView != null) {
mVideoRenderingView.getHolder().addCallback(new Callback() {
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
Log.i("Video display surface is being changed.");
if (!useGLrendering) {
synchronized (AndroidVideoWindowImpl.this) {
mBitmap = Bitmap.createBitmap(width, height, Config.RGB_565);
mSurface = holder.getSurface();
}
}
if (mListener != null)
mListener.onVideoRenderingSurfaceReady(AndroidVideoWindowImpl.this, mVideoRenderingView);
Log.w("Video display surface changed");
}
if (mListener!=null) mListener.onVideoRenderingSurfaceReady(AndroidVideoWindowImpl.this, mVideoRenderingView);
Log.w("Video display surface changed");
}
public void surfaceCreated(SurfaceHolder holder) {
Log.w("Video display surface created");
}
public void surfaceCreated(SurfaceHolder holder) {
Log.w("Video display surface created");
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (!useGLrendering) {
synchronized(AndroidVideoWindowImpl.this){
mSurface=null;
mBitmap=null;
public void surfaceDestroyed(SurfaceHolder holder) {
if (!useGLrendering) {
synchronized (AndroidVideoWindowImpl.this) {
mSurface = null;
mBitmap = null;
}
}
if (mListener != null)
mListener.onVideoRenderingSurfaceDestroyed(AndroidVideoWindowImpl.this);
Log.d("Video display surface destroyed");
}
if (mListener!=null)
mListener.onVideoRenderingSurfaceDestroyed(AndroidVideoWindowImpl.this);
Log.d("Video display surface destroyed");
}
});
});
}
// register callback for preview surface events
if (mVideoPreviewView != null) {
mVideoPreviewView.getHolder().addCallback(new Callback(){
......@@ -140,7 +143,7 @@ public class AndroidVideoWindowImpl {
});
}
if (useGLrendering) {
if (useGLrendering && mVideoRenderingView != null) {
renderer = new Renderer();
((GLSurfaceView)mVideoRenderingView).setRenderer(renderer);
((GLSurfaceView)mVideoRenderingView).setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
......@@ -162,7 +165,7 @@ public class AndroidVideoWindowImpl {
/**
 * Returns the native rendering {@link Surface}, or {@code null} when no
 * rendering view was supplied (the display surface is optional).
 * Meaningless in GL mode, hence the error log.
 */
public Surface getSurface(){
	if (useGLrendering)
		Log.e("View class does not match Video display filter used (you must use a non-GL View)");
	// Guard: the rendering view is optional in this version.
	return mVideoRenderingView != null ? mVideoRenderingView.getHolder().getSurface() : null;
}
public SurfaceView getPreviewSurfaceView(){
if (useGLrendering)
......@@ -182,7 +185,7 @@ public class AndroidVideoWindowImpl {
}
/**
 * Asks the GL rendering view to redraw (render mode is RENDERMODE_WHEN_DIRTY).
 * No-op when no rendering view was supplied, since it is optional.
 */
public void requestRender() {
	if (mVideoRenderingView != null) ((GLSurfaceView)mVideoRenderingView).requestRender();
}
//Called by the mediastreamer2 android display filter
......
......@@ -1620,11 +1620,6 @@ static void configure_video_preview_source(VideoPreview *stream) {
}
void video_preview_start(VideoPreview *stream, MSWebCam *device) {
MSPixFmt format = MS_YUV420P; /* Display format */
int mirroring = 1;
int corner = -1;
MSVideoSize disp_size = stream->sent_vsize;
const char *displaytype = stream->display_name;
MSConnectionHelper ch;
stream->source = ms_web_cam_create_reader(device);
......@@ -1632,6 +1627,16 @@ void video_preview_start(VideoPreview *stream, MSWebCam *device) {
/* configure the filters */
configure_video_preview_source(stream);
#if defined(__ANDROID__)
// On Android the capture filter doesn't need a display filter to render the preview
stream->output2 = ms_factory_create_filter(stream->ms.factory, MS_VOID_SINK_ID);
#else
MSPixFmt format = MS_YUV420P; /* Display format */
int mirroring = 1;
int corner = -1;
MSVideoSize disp_size = stream->sent_vsize;
const char *displaytype = stream->display_name;
if (displaytype) {
stream->output2=ms_factory_create_filter_from_name(stream->ms.factory, displaytype);
ms_filter_call_method(stream->output2, MS_FILTER_SET_PIX_FMT, &format);
......@@ -1640,6 +1645,7 @@ void video_preview_start(VideoPreview *stream, MSWebCam *device) {
ms_filter_call_method(stream->output2, MS_VIDEO_DISPLAY_SET_LOCAL_VIEW_MODE, &corner);
/* and then connect all */
}
#endif
stream->local_jpegwriter = ms_factory_create_filter(stream->ms.factory, MS_JPEG_WRITER_ID);
if (stream->local_jpegwriter) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment