Commit 4915fd9b authored by Guillaume Beraudo

Merge branch 'master' of git.linphone.org:mediastreamer2

parents 6791023e cec49fcf
......@@ -201,15 +201,16 @@ AC_DEFUN([MS_CHECK_VIDEO],[
*) AC_MSG_ERROR(bad value ${enableval} for --disable-vp8) ;;
esac],[vp8=true])
vp8dir=/usr
if test x$vp8 = xtrue; then
PKG_CHECK_MODULES(VP8, [vpx >= 0.9.6 ], [have_vp8=yes],
[have_vp8=no])
if test "$have_vp8" = "no" ; then
AC_CHECK_HEADERS([vpx/vpx_encoder.h],
[ have_vp8=yes
VP8_LIBS="-lvpx"
AC_SUBST(VP8_LIBS)
])
MS_CHECK_DEP([VP8 codec],[VP8],[${vp8dir}/include],
[${vp8dir}/lib],[vpx/vpx_encoder.h],[vpx],[vpx_codec_encode])
if test "$VP8_found" = "yes" ; then
have_vp8=yes
fi
fi
fi
......
......@@ -72,8 +72,8 @@ fi
dnl Checks for programs.
AC_PROG_CC
AC_PROG_CXX
AC_PROG_CC
AC_PROG_OBJC
AC_CONFIG_MACRO_DIR([m4])
......@@ -702,7 +702,7 @@ AC_ARG_ENABLE(tests,
yes) tests=yes ;;
no) tests=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for --enable-tests) ;;
esac],[tests=no])
esac],[tests=yes])
AM_CONDITIONAL(ENABLE_TESTS, test x$tests = xyes)
......
......@@ -23,10 +23,10 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
typedef struct MSAudioMixerCtl{
int pin;
union {
union param_t {
float gain; /**<gain correction */
int active; /**< to mute or unmute the channel */
};
} param;
} MSAudioMixerCtl;
#define MS_AUDIO_MIXER_SET_INPUT_GAIN MS_FILTER_METHOD(MS_AUDIO_MIXER_ID,0,MSAudioMixerCtl)
......
......@@ -42,11 +42,11 @@ struct _MSAudioEndpoint{
MSFilter *in_resampler,*out_resampler;
MSCPoint out_cut_point;
MSCPoint in_cut_point;
MSCPoint in_cut_point_prev;
MSCPoint mixer_in;
MSCPoint mixer_out;
MSAudioConference *conference;
int pin;
bool_t is_remote;
};
typedef struct _MSAudioEndpoint MSAudioEndpoint;
......
......@@ -552,6 +552,7 @@ typedef enum _MSFilterInterfaceId MSFilterInterfaceId;
#define MS_FILTER_SET_VAD_PROB_START MS_FILTER_BASE_METHOD(23,int)
#define MS_FILTER_SET_VAD_PROB_CONTINUE MS_FILTER_BASE_METHOD(24,int)
#define MS_FILTER_SET_MAX_GAIN MS_FILTER_BASE_METHOD(25,int)
#define MS_VIDEO_CAPTURE_SET_AUTOFOCUS MS_FILTER_BASE_METHOD(26,int)
#define MS_CONF_SPEEX_PREPROCESS_MIC MS_FILTER_EVENT(MS_CONF_ID, 1, void*)
#define MS_CONF_CHANNEL_VOLUME MS_FILTER_EVENT(MS_CONF_ID, 3, void*)
......
......@@ -50,9 +50,15 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#define MS_VIDEO_SIZE_QQVGA_W 160
#define MS_VIDEO_SIZE_QQVGA_H 120
#define MS_VIDEO_SIZE_HQVGA_W 160
#define MS_VIDEO_SIZE_HQVGA_H 240
#define MS_VIDEO_SIZE_QVGA_W 320
#define MS_VIDEO_SIZE_QVGA_H 240
#define MS_VIDEO_SIZE_HVGA_W 320
#define MS_VIDEO_SIZE_HVGA_H 480
#define MS_VIDEO_SIZE_VGA_W 640
#define MS_VIDEO_SIZE_VGA_H 480
......@@ -217,6 +223,17 @@ MS2_PUBLIC void rgb24_copy_revert(uint8_t *dstbuf, int dstlsz,
MS2_PUBLIC void ms_rgb_to_yuv(const uint8_t rgb[3], uint8_t yuv[3]);
#ifdef __arm__
MS2_PUBLIC void rotate_plane_neon_clockwise(int wDest, int hDest, int full_width, uint8_t* src, uint8_t* dst);
MS2_PUBLIC void rotate_plane_neon_anticlockwise(int wDest, int hDest, int full_width, uint8_t* src, uint8_t* dst);
MS2_PUBLIC void rotate_cbcr_to_cr_cb(int wDest, int hDest, int full_width, uint8_t* cbcr_src, uint8_t* cr_dst, uint8_t* cb_dst,bool_t clockWise);
MS2_PUBLIC void deinterlace_and_rotate_180_neon(uint8_t* ysrc, uint8_t* cbcrsrc, uint8_t* ydst, uint8_t* udst, uint8_t* vdst, int w, int h, int y_byte_per_row,int cbcr_byte_per_row);
void deinterlace_down_scale_and_rotate_180_neon(uint8_t* ysrc, uint8_t* cbcrsrc, uint8_t* ydst, uint8_t* udst, uint8_t* vdst, int w, int h, int y_byte_per_row,int cbcr_byte_per_row,bool_t down_scale);
void deinterlace_down_scale_neon(uint8_t* ysrc, uint8_t* cbcrsrc, uint8_t* ydst, uint8_t* u_dst, uint8_t* v_dst, int w, int h, int y_byte_per_row,int cbcr_byte_per_row,bool_t down_scale);
mblk_t *copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(uint8_t* y, uint8_t * cbcr, int rotation, int w, int h, int y_byte_per_row,int cbcr_byte_per_row, bool_t uFirstvSecond, bool_t down_scale);
#endif
/* Tells whether video size vs1 is at least as large as vs2 in both
 * dimensions (i.e. vs1 can fully contain vs2). */
static inline bool_t ms_video_size_greater_than(MSVideoSize vs1, MSVideoSize vs2){
	bool_t wide_enough = vs1.width >= vs2.width;
	bool_t tall_enough = vs1.height >= vs2.height;
	return wide_enough && tall_enough;
}
......
......@@ -60,7 +60,7 @@ public class MediastreamerActivity extends Activity {
Thread msThread;
int cameraId = 0;
String videoCodec = VP8_MIME_TYPE;
String remoteIp = "192.168.1.100";//27.0.0.1";
String remoteIp = "127.0.0.1";
short remotePort = 4000, localPort = 4000;
int bitrate = 256;
......@@ -363,4 +363,4 @@ public class MediastreamerActivity extends Activity {
}
return 0;
}
}
\ No newline at end of file
}
......@@ -196,13 +196,15 @@ public class AndroidVideoWindowImpl {
}
public void onDrawFrame(GL10 gl) {
if (ptr == 0)
return;
if (initPending) {
OpenGLESDisplay.init(ptr, width, height);
initPending = false;
synchronized (this) {
if (ptr == 0)
return;
if (initPending) {
OpenGLESDisplay.init(ptr, width, height);
initPending = false;
}
OpenGLESDisplay.render(ptr);
}
OpenGLESDisplay.render(ptr);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
......
......@@ -75,6 +75,13 @@ public class AndroidVideoApi5JniWrapper {
return selectNearestResolutionAvailableForCamera(cameraId, requestedW, requestedH);
}
/**
 * Triggers a one-shot autofocus run on the given camera, if its current
 * focus mode supports it (AUTO or MACRO). No-op for a null camera or any
 * other focus mode.
 *
 * @param cam the android.hardware.Camera instance, passed as Object from JNI
 */
static public void activateAutoFocus(Object cam) {
	Log.d("mediastreamer", "Turning on autofocus on camera " + cam);
	Camera camera = (Camera) cam;
	if (camera == null)
		return;
	// Fetch parameters once instead of twice; compare focus modes with
	// equals(): '==' on Strings checks identity, not content, and is only
	// accidentally correct when the framework returns interned constants.
	String focusMode = camera.getParameters().getFocusMode();
	if (Parameters.FOCUS_MODE_AUTO.equals(focusMode) || Parameters.FOCUS_MODE_MACRO.equals(focusMode))
		camera.autoFocus(null); // We don't need to do anything after the focus finished, so we don't need a callback
}
public static Object startRecording(int cameraId, int width, int height, int fps, int rotation, final long nativePtr) {
Log.d("mediastreamer", "startRecording(" + cameraId + ", " + width + ", " + height + ", " + fps + ", " + rotation + ", " + nativePtr + ")");
Camera camera = Camera.open();
......@@ -86,8 +93,8 @@ public class AndroidVideoApi5JniWrapper {
// forward image data to JNI
putImage(nativePtr, data);
}
});
});
camera.startPreview();
Log.d("mediastreamer", "Returning camera object: " + camera);
return camera;
......@@ -142,21 +149,31 @@ public class AndroidVideoApi5JniWrapper {
int rW = Math.max(requestedW, requestedH);
int rH = Math.min(requestedW, requestedH);
try {
try {
// look for nearest size
Size result = null;
int req = rW * rH;
int minDist = Integer.MAX_VALUE;
int useDownscale = 0;
for(Size s: supportedSizes) {
int dist = Math.abs(req - s.width * s.height);
if (dist < minDist) {
minDist = dist;
result = s;
useDownscale = 0;
}
/* MS2 has a NEON downscaler, so we test this too */
int downScaleDist = Math.abs(req - s.width * s.height / 4);
if (downScaleDist < minDist) {
minDist = downScaleDist;
result = s;
useDownscale = 1;
}
if (s.width == rW && s.height == rH)
return new int[] {s.width, s.height};
return new int[] {s.width, s.height, 0};
}
return new int[] {result.width, result.height};
return new int[] {result.width, result.height, useDownscale};
} catch (Exception exc) {
exc.printStackTrace();
return null;
......
......@@ -10,10 +10,10 @@
Summary: Audio/Video real-time streaming
Name: mediastreamer
Version: @MEDIASTREAMER_VERSION@
Release: 2
License: LGPL
Release: 3
License: GPL
Group: Applications/Communications
URL: http://linphone.org/mediastreamer2/
URL: http://www.mediastreamer2.com
Source0: %{name}-@MEDIASTREAMER_VERSION@.tar.gz
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot
%ifarch %ix86
......@@ -77,11 +77,11 @@ rm -rf $RPM_BUILD_ROOT
%files
%defattr(-,root,root,-)
%doc AUTHORS COPYING ChangeLog INSTALL NEWS README
%{_bindir}/mediastream
%{_libdir}/*.so.*
/usr/share/images/nowebcamCIF.jpg
/usr/share/locale/??/LC_MESSAGES/mediastreamer.mo
/usr/share/locale/??_??/LC_MESSAGES/mediastreamer.mo
%{_libexecdir}
%files devel
%defattr(-,root,root,-)
......
......@@ -263,8 +263,8 @@ make_gitversion_h:
fi \
&& rm -f $(GITVERSION_FILE_TMP) ;\
fi
if ! test -f $(GITVERSION_FILE) ; then \
$(ECHO) -n "#define GIT_VERSION \"unknown\" " > $(GITVERSION_FILE) ;\
if ! test -f $(srcdir)/$(GITVERSION_FILE) ; then \
$(ECHO) -n "#define GIT_VERSION \"unknown\" " > $(srcdir)/$(GITVERSION_FILE) ;\
fi
$(GITVERSION_FILE): make_gitversion_h
......
......@@ -34,6 +34,7 @@ typedef struct AndroidDisplay{
MSVideoSize vsize;
struct opengles_display* ogl;
jboolean ogl_free_ready;
jmethodID set_opengles_display_id;
jmethodID request_render_id;
}AndroidDisplay;
......@@ -56,16 +57,23 @@ static void android_display_init(MSFilter *f){
if (ad->request_render_id == 0)
ms_error("Could not find 'requestRender' method\n");
ad->ogl = ogl_display_new();
ad->ogl_free_ready = FALSE;
f->data=ad;
ms_message("%s %p %p", __FUNCTION__, f, ad);
}
static void android_display_uninit(MSFilter *f){
AndroidDisplay *ad=(AndroidDisplay*)f->data;
ms_message("%s %p %p", __FUNCTION__, f, ad->ogl);
if (ad->ogl) {
// uninit must be called with gl context set (in SurfaceDestroyed callback)
ogl_display_free(ad->ogl);
if (ad->ogl_free_ready) {
ms_free(ad->ogl);
ad->ogl = 0;
} else {
ad->ogl_free_ready = TRUE;
}
}
ms_free(ad);
......@@ -84,7 +92,11 @@ static void android_display_process(MSFilter *f){
if ((m=ms_queue_peek_last(f->inputs[0]))!=NULL){
if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
/* schedule display of frame */
ogl_display_set_yuv_to_display(ad->ogl, m);
if (!ad->ogl || !ad->ogl_free_ready) {
ogl_display_set_yuv_to_display(ad->ogl, m);
} else {
ms_warning("%s: opengldisplay not ready (%p)", __FUNCTION__, ad->ogl);
}
ms_queue_remove(f->inputs[0], m);
JNIEnv *jenv=ms_get_jni_env();
......@@ -106,17 +118,30 @@ static int android_display_set_window(MSFilter *f, void *arg){
jobject window=(jobject)id;
ms_filter_lock(f);
ad->android_video_window=window;
if (ad->android_video_window) {
if (window) {
unsigned int ptr = (unsigned int)ad->ogl;
ms_message("Sending opengles_display pointer as long: %p -> %u\n", ad->ogl, ptr);
(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->set_opengles_display_id, ptr);
ms_message("Sending opengles_display pointer as long: %p -> %u", ad->ogl, ptr);
(*jenv)->CallVoidMethod(jenv,window,ad->set_opengles_display_id, ptr);
ad->ogl_free_ready = FALSE;
} else {
/* when context is lost GL resources are freed by Android */
ogl_display_uninit(ad->ogl, FALSE);
if (window != ad->android_video_window) {
ms_message("Clearing opengles_display (%p : %d)", ad->ogl, ad->ogl_free_ready);
/* when context is lost GL resources are freed by Android */
ogl_display_uninit(ad->ogl, FALSE);
if (ad->ogl_free_ready) {
ms_free(ad->ogl);
ad->ogl = 0;
} else {
ad->ogl_free_ready = TRUE;
}
/* clear native ptr, to prevent rendering to occur now that ptr is invalid */
(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->set_opengles_display_id, 0);
}
}
ad->android_video_window=window;
ms_filter_unlock(f);
return 0;
......
......@@ -61,16 +61,23 @@ static void cut_audio_stream_graph(MSAudioEndpoint *ep, bool_t is_remote){
/*stop the audio graph*/
ms_ticker_detach(st->ticker,st->soundread);
if (!st->ec) ms_ticker_detach(st->ticker,st->soundwrite);
ep->in_cut_point=just_after(st->decoder);
ms_filter_unlink(st->decoder,0,ep->in_cut_point.filter, ep->in_cut_point.pin);
ep->in_cut_point_prev.pin=0;
if (is_remote){
/*we would like to keep the volrecv (MSVolume filter) in the graph to measure the output level*/
ep->in_cut_point_prev.filter=st->volrecv;
}else{
ep->in_cut_point_prev.filter=st->decoder;
}
ep->in_cut_point=just_after(ep->in_cut_point_prev.filter);
ms_filter_unlink(ep->in_cut_point_prev.filter,ep->in_cut_point_prev.pin,ep->in_cut_point.filter, ep->in_cut_point.pin);
ep->out_cut_point=just_before(st->encoder);
ms_filter_unlink(ep->out_cut_point.filter,ep->out_cut_point.pin,st->encoder,0);
if (is_remote){
ep->mixer_in.filter=st->decoder;
ep->mixer_in.pin=0;
ep->mixer_in.filter=ep->in_cut_point_prev.filter;
ep->mixer_in.pin=ep->in_cut_point_prev.pin;
ep->mixer_out.filter=st->encoder;
ep->mixer_out.pin=0;
}else{
......@@ -82,7 +89,7 @@ static void cut_audio_stream_graph(MSAudioEndpoint *ep, bool_t is_remote){
static void redo_audio_stream_graph(MSAudioEndpoint *ep){
AudioStream *st=ep->st;
ms_filter_link(st->decoder,0,ep->in_cut_point.filter,ep->in_cut_point.pin);
ms_filter_link(ep->in_cut_point_prev.filter,ep->in_cut_point_prev.pin,ep->in_cut_point.filter,ep->in_cut_point.pin);
ms_filter_link(ep->out_cut_point.filter,ep->out_cut_point.pin,st->encoder,0);
ms_ticker_attach(st->ticker,st->soundread);
if (!st->ec)
......
......@@ -256,7 +256,7 @@ static int mixer_set_input_gain(MSFilter *f, void *data){
ms_warning("mixer_set_input_gain: invalid pin number %i",ctl->pin);
return -1;
}
s->channels[ctl->pin].gain=ctl->gain;
s->channels[ctl->pin].gain=ctl->param.gain;
return 0;
}
......@@ -267,7 +267,7 @@ static int mixer_set_active(MSFilter *f, void *data){
ms_warning("mixer_set_active_gain: invalid pin number %i",ctl->pin);
return -1;
}
s->channels[ctl->pin].active=ctl->active;
s->channels[ctl->pin].active=ctl->param.active;
return 0;
}
......
......@@ -271,6 +271,9 @@ int audio_stream_start_full(AudioStream *stream, RtpProfile *profile, const char
rtp_session_set_profile(rtps,profile);
if (remport>0) rtp_session_set_remote_addr_full(rtps,remip,remport,rem_rtcp_port);
if (rem_rtcp_port<=0){
rtp_session_enable_rtcp(rtps,FALSE);
}
rtp_session_set_payload_type(rtps,payload);
rtp_session_set_jitter_compensation(rtps,jitt_comp);
......@@ -415,10 +418,10 @@ int audio_stream_start_full(AudioStream *stream, RtpProfile *profile, const char
ms_connection_helper_link(&h,stream->rtprecv,-1,0);
ms_connection_helper_link(&h,stream->decoder,0,0);
ms_connection_helper_link(&h,stream->dtmfgen,0,0);
if (stream->equalizer)
ms_connection_helper_link(&h,stream->equalizer,0,0);
if (stream->volrecv)
ms_connection_helper_link(&h,stream->volrecv,0,0);
if (stream->equalizer)
ms_connection_helper_link(&h,stream->equalizer,0,0);
if (stream->ec)
ms_connection_helper_link(&h,stream->ec,0,0);
if (stream->write_resampler)
......@@ -642,10 +645,10 @@ void audio_stream_stop(AudioStream * stream)
ms_connection_helper_unlink(&h,stream->rtprecv,-1,0);
ms_connection_helper_unlink(&h,stream->decoder,0,0);
ms_connection_helper_unlink(&h,stream->dtmfgen,0,0);
if (stream->equalizer)
ms_connection_helper_unlink(&h,stream->equalizer,0,0);
if (stream->volrecv!=NULL)
ms_connection_helper_unlink(&h,stream->volrecv,0,0);
if (stream->equalizer)
ms_connection_helper_unlink(&h,stream->equalizer,0,0);
if (stream->ec!=NULL)
ms_connection_helper_unlink(&h,stream->ec,0,0);
if (stream->write_resampler!=NULL)
......
......@@ -37,7 +37,7 @@ struct _MSEventQueue{
};
static void write_event(MSEventQueue *q, MSFilter *f, unsigned int ev_id, void *arg){
int argsize=ev_id & 0xffff;
int argsize=ev_id & 0xff;
int size=argsize+16;
uint8_t *nextpos=q->wptr+size;
......
......@@ -78,6 +78,7 @@ struct AndroidReaderContext {
MSVideoSize requestedSize, hwCapableSize;
ms_mutex_t mutex;
int rotation, rotationSavedDuringVSize;
int useDownscaling;
jobject androidCamera;
jobject previewWindow;
......@@ -97,6 +98,16 @@ static int video_capture_set_fps(MSFilter *f, void *arg){
return 0;
}
/* MS_VIDEO_CAPTURE_SET_AUTOFOCUS handler: forwards the request to the Java
 * helper method activateAutoFocus(Object) on the capture helper class.
 * Always returns 0 (the filter method convention for success here). */
static int video_capture_set_autofocus(MSFilter *f, void* data){
	JNIEnv *env = ms_get_jni_env();
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	jclass helperClass = getHelperClass(env);
	jmethodID method = env->GetStaticMethodID(helperClass,"activateAutoFocus", "(Ljava/lang/Object;)V");
	/* The Java method is declared void ("...)V" signature), so it must be
	 * invoked with CallStaticVoidMethod; CallStaticObjectMethod on a void
	 * method is undefined behavior per the JNI specification. Also guard
	 * against a failed method lookup (GetStaticMethodID returns NULL and
	 * raises a pending NoSuchMethodError). */
	if (method != NULL)
		env->CallStaticVoidMethod(helperClass, method, d->androidCamera);
	else
		ms_error("Could not find 'activateAutoFocus' method");
	return 0;
}
static int video_capture_get_fps(MSFilter *f, void *arg){
AndroidReaderContext* d = (AndroidReaderContext*) f->data;
*((float*)arg) = d->fps;
......@@ -109,6 +120,13 @@ static int video_capture_set_vsize(MSFilter *f, void* data){
d->requestedSize=*(MSVideoSize*)data;
// always request landscape mode, orientation is handled later
if (d->requestedSize.height > d->requestedSize.width) {
int tmp = d->requestedSize.height;
d->requestedSize.height = d->requestedSize.width;
d->requestedSize.width = tmp;
}
JNIEnv *env = ms_get_jni_env();
jclass helperClass = getHelperClass(env);
......@@ -125,25 +143,29 @@ static int video_capture_set_vsize(MSFilter *f, void* data){
// handle result :
// - 0 : width
// - 1 : height
jint res[2];
env->GetIntArrayRegion((jintArray)resArray, 0, 2, res);
ms_message("Camera selected resolution is: %dx%d (requested: %dx%d)\n", res[0], res[1], d->requestedSize.width, d->requestedSize.height);
// - 2 : useDownscaling
jint res[3];
env->GetIntArrayRegion((jintArray)resArray, 0, 3, res);
ms_message("Camera selected resolution is: %dx%d (requested: %dx%d) with downscaling?%d\n", res[0], res[1], d->requestedSize.width, d->requestedSize.height, res[2]);
d->hwCapableSize.width = res[0];
d->hwCapableSize.height = res[1];
d->useDownscaling = res[2];
int rqSize = d->requestedSize.width * d->requestedSize.height;
int hwSize = d->hwCapableSize.width * d->hwCapableSize.height;
double downscale = d->useDownscaling ? 0.5 : 1;
// if hw supplies a smaller resolution, modify requested size accordingly
if (hwSize < rqSize) {
if ((hwSize * downscale * downscale) < rqSize) {
ms_message("Camera cannot produce requested resolution %dx%d, will supply smaller one: %dx%d\n",
d->requestedSize.width, d->requestedSize.height, res[0], res[1]);
d->requestedSize = d->hwCapableSize;
} else if (hwSize > rqSize) {
d->requestedSize.width, d->requestedSize.height, (int) (res[0] * downscale), (int) (res[1]*downscale));
d->requestedSize.width = (int) (d->hwCapableSize.width * downscale);
d->requestedSize.height = (int) (d->hwCapableSize.height * downscale);
} else if ((hwSize * downscale * downscale) > rqSize) {
ms_message("Camera cannot produce requested resolution %dx%d, will capture a bigger one (%dx%d) and crop it to match encoder requested resolution\n",
d->requestedSize.width, d->requestedSize.height, res[0], res[1]);
d->requestedSize.width, d->requestedSize.height, (int)(res[0] * downscale), (int)(res[1] * downscale));
}
// is phone held |_ to cam orientation ?
if (d->rotation == UNDEFINED_ROTATION || compute_image_rotation_correction(d, d->rotation) % 180 != 0) {
if (d->rotation == UNDEFINED_ROTATION) {
......@@ -334,7 +356,8 @@ static MSFilterMethod video_capture_methods[]={
{ MS_FILTER_GET_PIX_FMT, &video_capture_get_pix_fmt},
{ MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID , &video_set_native_preview_window },//preview is managed by capture filter
{ MS_VIDEO_DISPLAY_GET_NATIVE_WINDOW_ID , &video_get_native_preview_window },
{ MS_VIDEO_CAPTURE_SET_DEVICE_ORIENTATION, &video_set_device_rotation },
{ MS_VIDEO_CAPTURE_SET_DEVICE_ORIENTATION, &video_set_device_rotation },
{ MS_VIDEO_CAPTURE_SET_AUTOFOCUS, &video_capture_set_autofocus },
{ 0,0 }
};
......@@ -418,6 +441,7 @@ static void video_capture_detect(MSWebCamManager *obj){
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT void JNICALL Java_org_linphone_mediastream_video_capture_AndroidVideoApi5JniWrapper_setAndroidSdkVersion
(JNIEnv *env, jclass c, jint version) {
android_sdk_version = version;
......@@ -451,7 +475,7 @@ JNIEXPORT void JNICALL Java_org_linphone_mediastream_video_capture_AndroidVideoA
}
int y_cropping_offset=0, cbcr_cropping_offset=0;
compute_cropping_offsets(d->hwCapableSize, d->requestedSize, &y_cropping_offset, &cbcr_cropping_offset);
//compute_cropping_offsets(d->hwCapableSize, d->requestedSize, &y_cropping_offset, &cbcr_cropping_offset);
int width = d->hwCapableSize.width;
int height = d->hwCapableSize.height;
......@@ -464,14 +488,15 @@ JNIEXPORT void JNICALL Java_org_linphone_mediastream_video_capture_AndroidVideoA
It only implies one thing: image needs to rotated by that amount to be correctly
displayed.
*/
mblk_t* yuv_block = copy_ycbcrbiplanar_to_true_yuv_with_rotation(y_src
mblk_t* yuv_block = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(y_src
, cbcr_src
, image_rotation_correction
, d->requestedSize.width
, d->requestedSize.height
, d->hwCapableSize.width
, d->hwCapableSize.width,
false);
false,
d->useDownscaling);
if (yuv_block) {
if (d->frame)
freemsg(d->frame);
......
......@@ -463,7 +463,7 @@ extern MSSndCardDesc aq_card_desc;
extern MSSndCardDesc pulse_card_desc;
#endif
#ifdef __MACIOUNIT_ENABLED__
#ifdef __IOSIOUNIT_ENABLED__
extern MSSndCardDesc au_card_desc;
#endif
......@@ -500,7 +500,7 @@ static MSSndCardDesc * ms_snd_card_descs[]={
&pulse_card_desc,
#endif
#ifdef __MACIOUNIT_ENABLED__
#ifdef __IOSIOUNIT_ENABLED__
&au_card_desc,
#endif
#ifdef ANDROID
......
......@@ -587,7 +587,8 @@ public:
return ret;
}
bool isTimeToSend(uint64_t ticker_time);
MSVideoSize getVSize()const{
MSVideoSize getVSize(){
if (!_ready) createDshowGraph(); /* so that _vsize is updated according to hardware capabilities*/
return _vsize;
}
void setVSize(MSVideoSize vsize){
......@@ -708,7 +709,7 @@ static char * fourcc_to_char(char *str, uint32_t fcc){
return str;
}
static int find_best_format(ComPtr<IAMStreamConfig> streamConfig, int count, MSVideoSize *requested_size, MSPixFmt requested_fmt ){
static int find_best_format(ComPtr<IAMStreamConfig> streamConfig, int count,MSVideoSize *requested_size, MSPixFmt requested_fmt ){
int i;
MSVideoSize best_found={0,0};
int best_index=-1;
......
......@@ -191,7 +191,7 @@ static int v4lv2_do_mmap(V4lState *s){
}
msg=esballoc(start,buf.length,0,NULL);
/* adjust to real size of picture*/
if (s->pix_fmt==MS_RGB24
if (s->pix_fmt==MS_RGB24)
msg->b_wptr+=s->vsize.width*s->vsize.height*3;
else
msg->b_wptr+=(s->vsize.width*s->vsize.height*3)/2;
......
......@@ -53,6 +53,18 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#endif
/* Increments the reference count of every block in the mblk_t chain,
 * walking the b_cont links iteratively. */
static void inc_ref(mblk_t *m){
	mblk_t *cur = m;
	do {
		cur->b_datap->db_ref++;
		cur = cur->b_cont;
	} while (cur);
}
/* Decrements the reference count of every block in the mblk_t chain,
 * walking the b_cont links iteratively. */
static void dec_ref(mblk_t *m){
	mblk_t *cur = m;
	do {
		cur->b_datap->db_ref--;
		cur = cur->b_cont;
	} while (cur);
}
typedef struct V4l2State{
int fd;
ms_thread_t thread;
......@@ -264,7 +276,7 @@ static int msv4l2_do_mmap(V4l2State *s){
if (-1==v4l2_ioctl (s->fd, VIDIOC_QBUF, &buf)){
ms_error("VIDIOC_QBUF failed: %s",strerror(errno));
}else {
s->frames[i]->b_datap->db_ref++;
inc_ref(s->frames[i]);
s->queued++;
}
}
......@@ -305,7 +317,7 @@ static mblk_t *v4l2_dequeue_ready_buffer(V4l2State *s, int poll_timeout_ms){
ms_debug("v4l2: de-queue buf %i",buf.index);
/*decrement ref count of dequeued buffer */
ret=s->frames[buf.index];
ret->b_datap->db_ref--;
dec_ref(ret);
if (buf.index >= s->frame_max){
ms_error("buf.index>=s->max_frames !");
return NULL;
......@@ -343,7 +355,7 @@ static mblk_t * v4lv2_grab_image(V4l2State *s, int poll_timeout_ms){
else {
ms_debug("v4l2: queue buf %i",k);
/*increment ref count of queued buffer*/
s->frames[k]->b_datap->db_ref++;
inc_ref(s->frames[k]);
s->queued++;
}
}
......@@ -365,7 +377,7 @@ static void msv4l2_do_munmap(V4l2State *s){
}
for(i=0;i<s->frame_max;++i){
mblk_t *msg=s->frames[i];
mblk_t *msg=s->frames[i]->b_cont;
int len=msg->b_datap->db_lim-msg->b_datap->db_base;
if (v4l2_munmap(msg->b_datap->db_base,len)<0){
ms_warning("MSV4l2: Fail to unmap: %s",strerror(errno));
......
......@@ -395,10 +395,14 @@ static MSScalerContext *ff_create_swscale_context(int src_w, int src_h, MSPixFmt
int ff_flags=0;
MSFFScalerContext *ctx=ms_new(MSFFScalerContext,1);
ctx->src_h=src_h;
#if (TARGET_OS_IPHONE)
ff_flags|=SWS_FAST_BILINEAR;
#else
if (flags & MS_SCALER_METHOD_BILINEAR)
ff_flags|=SWS_BILINEAR;
else if (flags & MS_SCALER_METHOD_NEIGHBOUR)
ff_flags|=SWS_BILINEAR;
#endif
ctx->ctx=sws_getContext (src_w,src_h,ms_pix_fmt_to_ffmpeg (src_fmt),
dst_w,dst_h,ms_pix_fmt_to_ffmpeg (dst_fmt),ff_flags,NULL,NULL,NULL);
if (ctx->ctx==NULL){
......@@ -593,18 +597,23 @@ static void rotate_plane(int wDest, int hDest, int full_width, uint8_t* src, uin
static int hasNeon = -1;
#elif defined (__ARM_NEON__)
static int hasNeon = 1;
#else
static int hasNeon = 0;
#endif
/* Destination and source images may have their dimensions inverted.*/
mblk_t *copy_ycbcrbiplanar_to_true_yuv_with_rotation(uint8_t* y, uint8_t * cbcr, int rotation, int w, int h, int y_byte_per_row,int cbcr_byte_per_row, bool_t uFirstvSecond) {
mblk_t *copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(uint8_t* y, uint8_t * cbcr, int rotation, int w, int h, int y_byte_per_row,int cbcr_byte_per_row, bool_t uFirstvSecond, bool_t down_scale) {
MSPicture pict;
mblk_t *yuv_block = ms_yuv_buf_alloc(&pict, w, h);
#ifdef ANDROID
if (hasNeon == -1) {
hasNeon = (android_getCpuFamily() == ANDROID_CPU_FAMILY_ARM && (android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_NEON) != 0);
}
#endif
if (down_scale && !hasNeon) {
ms_error("down scaling by two requires NEON, returning empty block");
return yuv_block;
}
if (!uFirstvSecond) {
unsigned char* tmp = pict.planes[1];
......@@ -620,21 +629,31 @@ mblk_t *copy_ycbcrbiplanar_to_true_yuv_with_rotation(uint8_t* y, uint8_t * cbcr,
uint8_t* u_dest=pict.planes[1], *v_dest=pict.planes[2];
if (rotation == 0) {
// plain copy
for(i=0; i<h; i++) {
memcpy(&pict.planes[0][i*w], &y[i*y_byte_per_row], w);