/*
mediastreamer2 library - modular sound and video processing and streaming

This is the video capture filter for Android.
It uses one of the JNI wrappers to access the Android video capture API.
See:
	org.linphone.mediastream.video.capture.AndroidVideoApi9JniWrapper
	org.linphone.mediastream.video.capture.AndroidVideoApi8JniWrapper
	org.linphone.mediastream.video.capture.AndroidVideoApi5JniWrapper

Copyright (C) 2010  Belledonne Communications, Grenoble, France

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 */

extern "C" {
#include "mediastreamer2/msvideo.h"
#include "mediastreamer2/msfilter.h"
#include "mediastreamer2/mswebcam.h"
#include "mediastreamer2/msjava.h"
#include "mediastreamer2/msticker.h"
}

#include <jni.h>
#include <math.h>

static int android_sdk_version = 5;

static const char* AndroidApi9WrapperPath = "org/linphone/mediastream/video/capture/AndroidVideoApi9JniWrapper";
static const char* AndroidApi8WrapperPath = "org/linphone/mediastream/video/capture/AndroidVideoApi8JniWrapper";
static const char* AndroidApi5WrapperPath = "org/linphone/mediastream/video/capture/AndroidVideoApi5JniWrapper";
static const char* VersionPath 			  = "org/linphone/mediastream/Version";

#define UNDEFINED_ROTATION -1

/************************ Data structures              ************************/
// Struct holding an Android camera's properties
struct AndroidWebcamConfig {
	int id;
	int frontFacing;
	int orientation;
};

struct AndroidReaderContext {
	AndroidReaderContext(MSFilter *f, MSWebCam *cam):filter(f), webcam(cam),frame(0),fps(5){
		ms_message("Creating AndroidReaderContext for Android VIDEO capture filter");
		ms_mutex_init(&mutex,NULL);
		androidCamera = 0;
		previewWindow = 0;
		rotation = rotationSavedDuringVSize = UNDEFINED_ROTATION;
		allocator = ms_yuv_buf_allocator_new();
	};

	~AndroidReaderContext(){
		if (frame != 0) {
			freeb(frame);
		}
		ms_yuv_buf_allocator_free(allocator);
		ms_mutex_destroy(&mutex);
	};

	MSFrameRateController fpsControl;
	MSAverageFPS averageFps;

	MSFilter *filter;
	MSWebCam *webcam;

	mblk_t *frame;
	float fps;
	MSVideoSize requestedSize, hwCapableSize, usedSize;
	ms_mutex_t mutex;
	int rotation, rotationSavedDuringVSize;
	int useDownscaling;
	char fps_context[64];
	MSYuvBufAllocator *allocator;

	jobject androidCamera;
	jobject previewWindow;
	jclass helperClass;
};
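/* Typical lifecycle of this context, as implemented by the filter methods below:
   video_capture_init() creates it, MS_FILTER_SET_VIDEO_SIZE / MS_FILTER_SET_FPS configure it,
   video_capture_preprocess() starts the camera through the JNI helper (startRecording),
   the Java side then calls ...AndroidVideoApi5JniWrapper_putImage() for every preview buffer,
   which converts it and stores it in 'frame', video_capture_process() pushes that frame to the
   filter output, and video_capture_postprocess()/video_capture_uninit() stop the camera and
   release everything. */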

/************************ Private helper methods       ************************/
static jclass getHelperClassGlobalRef(JNIEnv *env);
static int compute_image_rotation_correction(AndroidReaderContext* d, int rotation);
static void compute_cropping_offsets(MSVideoSize hwSize, MSVideoSize outputSize, int* yoff, int* cbcroff);
static AndroidReaderContext *getContext(MSFilter *f);

/************************ MS2 filter methods           ************************/
static int video_capture_set_fps(MSFilter *f, void *arg){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	d->fps=*((float*)arg);
	return 0;
}

static int video_capture_set_autofocus(MSFilter *f, void* data){
	JNIEnv *env = ms_get_jni_env();
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	jmethodID method = env->GetStaticMethodID(d->helperClass,"activateAutoFocus", "(Ljava/lang/Object;)V");
	env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera);
	return 0;
}

static int video_capture_get_fps(MSFilter *f, void *arg){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	*((float*)arg) = ms_average_fps_get(&d->averageFps);
	return 0;
}

static int video_capture_set_vsize(MSFilter *f, void* data){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	ms_mutex_lock(&d->mutex);

	d->requestedSize=*(MSVideoSize*)data;

	// always request landscape mode, orientation is handled later
	if (d->requestedSize.height > d->requestedSize.width) {
		int tmp = d->requestedSize.height;
		d->requestedSize.height = d->requestedSize.width;
		d->requestedSize.width = tmp;
	}

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"selectNearestResolutionAvailable", "(III)[I");

	// find nearest hw-available resolution (using a JNI call)
	jobject resArray = env->CallStaticObjectMethod(d->helperClass, method, ((AndroidWebcamConfig*)d->webcam->data)->id, d->requestedSize.width, d->requestedSize.height);

	if (!resArray) {
		ms_mutex_unlock(&d->mutex);
		ms_error("Failed to retrieve camera '%d' supported resolutions\n", ((AndroidWebcamConfig*)d->webcam->data)->id);
		return -1;
	}

	// handle result:
	//   - 0 : width
	//   - 1 : height
	//   - 2 : useDownscaling
	jint res[3];
	env->GetIntArrayRegion((jintArray)resArray, 0, 3, res);
	ms_message("Camera selected resolution is: %dx%d (requested: %dx%d) with downscaling?%d\n", res[0], res[1], d->requestedSize.width, d->requestedSize.height, res[2]);
	d->hwCapableSize.width =  res[0];
	d->hwCapableSize.height = res[1];
	d->useDownscaling = res[2];

	int rqSize = d->requestedSize.width * d->requestedSize.height;
	int hwSize = d->hwCapableSize.width * d->hwCapableSize.height;
	double downscale = d->useDownscaling ? 0.5 : 1;

	// if hw supplies a smaller resolution, modify requested size accordingly
	if ((hwSize * downscale * downscale) < rqSize) {
		ms_message("Camera cannot produce requested resolution %dx%d, will supply smaller one: %dx%d\n",
			d->requestedSize.width, d->requestedSize.height, (int) (res[0] * downscale), (int) (res[1]*downscale));
		d->usedSize.width = (int) (d->hwCapableSize.width * downscale);
		d->usedSize.height = (int) (d->hwCapableSize.height * downscale);
	} else if ((hwSize * downscale * downscale) > rqSize) {
		ms_message("Camera cannot produce requested resolution %dx%d, will capture a bigger one (%dx%d) and crop it to match encoder requested resolution\n",
			d->requestedSize.width, d->requestedSize.height, (int)(res[0] * downscale), (int)(res[1] * downscale));
		d->usedSize.width = d->requestedSize.width;
		d->usedSize.height = d->requestedSize.height;
	} else {
		d->usedSize.width = d->requestedSize.width;
		d->usedSize.height = d->requestedSize.height;
	}
	
	// is the phone held perpendicular (|_) to the cam orientation?
	if (d->rotation == UNDEFINED_ROTATION || compute_image_rotation_correction(d, d->rotation) % 180 != 0) {
		if (d->rotation == UNDEFINED_ROTATION) {
			ms_error("To produce a correct image, Mediastreamer MUST be aware of device's orientation BEFORE calling 'configure_video_source'\n"); 
			ms_warning("Capture filter do not know yet about device's orientation.\n"
				"Current assumption: device is held perpendicular to its webcam (ie: portrait mode for a phone)\n");
			d->rotationSavedDuringVSize = 0;
		} else {
			d->rotationSavedDuringVSize = d->rotation;
		}
		bool camIsLandscape = d->hwCapableSize.width > d->hwCapableSize.height;
		bool useIsLandscape = d->usedSize.width > d->usedSize.height;

		// if both are landscape or both portrait, swap
		if (camIsLandscape == useIsLandscape) {
			int t = d->usedSize.width;
			d->usedSize.width = d->usedSize.height;
			d->usedSize.height = t;
			ms_message("Swapped resolution width and height to : %dx%d\n", d->usedSize.width, d->usedSize.height);
		}
	} else {
		d->rotationSavedDuringVSize = d->rotation;
	}

	ms_mutex_unlock(&d->mutex);
	return 0;
}
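/* Illustrative walk-through of the sizing logic above (hypothetical values):
   - the encoder requests 288x352 (portrait), which is first forced to landscape 352x288;
   - selectNearestResolutionAvailable() returns e.g. {640, 480, 0}: hwCapableSize=640x480, no downscaling;
   - 640*480 > 352*288, so the camera will capture 640x480 and usedSize stays 352x288 (cropped in putImage);
   - if the device is held perpendicular to the sensor (rotation correction of 90 or 270), both sizes
     are landscape, so usedSize is finally swapped to 288x352. */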

static int video_capture_get_vsize(MSFilter *f, void* data){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	*(MSVideoSize*)data=d->usedSize;
	return 0;
}

static int video_capture_get_pix_fmt(MSFilter *f, void *data){
	*(MSPixFmt*)data=MS_YUV420P;
	return 0;
}

// Java will give us a pointer to the capture preview surface.
static int video_set_native_preview_window(MSFilter *f, void *arg) {
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	
	ms_mutex_lock(&d->mutex);

	jobject w = *((jobject*)arg);

	if (w == d->previewWindow) {
		ms_mutex_unlock(&d->mutex);
		return 0;
	}

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"setPreviewDisplaySurface", "(Ljava/lang/Object;Ljava/lang/Object;)V");

	if (d->androidCamera) {
		if (d->previewWindow == 0) {
			ms_message("Preview capture window set for the 1st time (win: %p rotation:%d)\n", w, d->rotation);
		} else {
			ms_message("Preview capture window changed (oldwin: %p newwin: %p rotation:%d)\n", d->previewWindow, w, d->rotation);

			env->CallStaticVoidMethod(d->helperClass,
						env->GetStaticMethodID(d->helperClass,"stopRecording", "(Ljava/lang/Object;)V"),
						d->androidCamera);
			env->DeleteGlobalRef(d->androidCamera);
			d->androidCamera = env->NewGlobalRef(
			env->CallStaticObjectMethod(d->helperClass,
						env->GetStaticMethodID(d->helperClass,"startRecording", "(IIIIIJ)Ljava/lang/Object;"),
						((AndroidWebcamConfig*)d->webcam->data)->id,
						d->hwCapableSize.width,
						d->hwCapableSize.height,
						(jint)d->fps,
						(d->rotation != UNDEFINED_ROTATION) ? d->rotation:0,
						(jlong)d));
		}
		// if previewWindow AND camera are valid => set preview window
		if (w && d->androidCamera)
			env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera, w);
	} else {
		ms_message("Preview capture window set but camera not created yet; remembering it for later use\n");
	}
	d->previewWindow = w;

	ms_mutex_unlock(&d->mutex);
	return 0;
}
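/* Behaviour summary of the function above: if the camera is already running and the preview surface
   changes, the helper is asked to stopRecording() and startRecording() again before the new surface
   is attached with setPreviewDisplaySurface(); if the camera does not exist yet, the surface is only
   remembered in previewWindow and attached later by video_capture_preprocess(). */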

static int video_get_native_preview_window(MSFilter *f, void *arg) {
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	arg = &d->previewWindow;
	return 0;
}

static int video_set_device_rotation(MSFilter* f, void* arg) {
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	d->rotation=*((int*)arg);
	ms_message("%s : %d\n", __FUNCTION__, d->rotation);
	return 0;
}

void video_capture_preprocess(MSFilter *f){
	ms_message("Preprocessing of Android VIDEO capture filter");

	AndroidReaderContext *d = getContext(f);
	ms_mutex_lock(&d->mutex);

	snprintf(d->fps_context, sizeof(d->fps_context), "Captured mean fps=%%f, expected=%f", d->fps);
	ms_video_init_framerate_controller(&d->fpsControl, d->fps);
	ms_video_init_average_fps(&d->averageFps, d->fps_context);

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"startRecording", "(IIIIIJ)Ljava/lang/Object;");

	ms_message("Starting Android camera '%d' (rotation:%d)", ((AndroidWebcamConfig*)d->webcam->data)->id, d->rotation);
	jobject cam = env->CallStaticObjectMethod(d->helperClass, method,
			((AndroidWebcamConfig*)d->webcam->data)->id,
			d->hwCapableSize.width,
			d->hwCapableSize.height,
			(jint)d->fps,
			d->rotationSavedDuringVSize,
			(jlong)d);
	d->androidCamera = env->NewGlobalRef(cam);

	if (d->previewWindow) {
		method = env->GetStaticMethodID(d->helperClass,"setPreviewDisplaySurface", "(Ljava/lang/Object;Ljava/lang/Object;)V");
		env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera, d->previewWindow);
	}
	ms_message("Preprocessing of Android VIDEO capture filter done");
	ms_mutex_unlock(&d->mutex);
}

static void video_capture_process(MSFilter *f){
	AndroidReaderContext* d = getContext(f);

	ms_mutex_lock(&d->mutex);

	// If frame not ready, return
	if (d->frame == 0) {
		ms_mutex_unlock(&d->mutex);
		return;
	}

	ms_video_update_average_fps(&d->averageFps, f->ticker->time);

	ms_queue_put(f->outputs[0],d->frame);
	d->frame = 0;
	ms_mutex_unlock(&d->mutex);
}

static void video_capture_postprocess(MSFilter *f){
	ms_message("Postprocessing of Android VIDEO capture filter");
	AndroidReaderContext* d = getContext(f);
	JNIEnv *env = ms_get_jni_env();
	
	ms_mutex_lock(&d->mutex);
	
	if (d->androidCamera) {
		jmethodID method = env->GetStaticMethodID(d->helperClass,"stopRecording", "(Ljava/lang/Object;)V");

		env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera);
		env->DeleteGlobalRef(d->androidCamera);
	}
	d->androidCamera = 0;
	d->previewWindow = 0;
	if (d->frame){
		freemsg(d->frame);
		d->frame=NULL;
	}
	ms_mutex_unlock(&d->mutex);
}

static void video_capture_init(MSFilter *f) {
	AndroidReaderContext* d = new AndroidReaderContext(f, 0);
	ms_message("Init of Android VIDEO capture filter (%p)", d);
	JNIEnv *env = ms_get_jni_env();
	d->helperClass = getHelperClassGlobalRef(env);
	f->data = d;
}

static void video_capture_uninit(MSFilter *f) {
	ms_message("Uninit of Android VIDEO capture filter");
	AndroidReaderContext* d = getContext(f);
	JNIEnv *env = ms_get_jni_env();
	env->DeleteGlobalRef(d->helperClass);
	delete d;
}

static MSFilterMethod video_capture_methods[]={
		{	MS_FILTER_SET_FPS,	&video_capture_set_fps},
		{	MS_FILTER_GET_FPS,	&video_capture_get_fps},
		{	MS_FILTER_SET_VIDEO_SIZE, &video_capture_set_vsize},
		{	MS_FILTER_GET_VIDEO_SIZE, &video_capture_get_vsize},
		{	MS_FILTER_GET_PIX_FMT, &video_capture_get_pix_fmt},
		{	MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID , &video_set_native_preview_window },//preview is managed by capture filter
		{	MS_VIDEO_DISPLAY_GET_NATIVE_WINDOW_ID , &video_get_native_preview_window },
		{   MS_VIDEO_CAPTURE_SET_DEVICE_ORIENTATION, &video_set_device_rotation },
		{   MS_VIDEO_CAPTURE_SET_AUTOFOCUS, &video_capture_set_autofocus },
		{	0,0 }
};

MSFilterDesc ms_video_capture_desc={
		MS_ANDROID_VIDEO_READ_ID,
		"MSAndroidVideoCapture",
		N_("A filter that captures Android video."),
		MS_FILTER_OTHER,
		NULL,
		0,
		1,
		video_capture_init,
		video_capture_preprocess,
		video_capture_process,
		video_capture_postprocess,
		video_capture_uninit,
		video_capture_methods
};

MS_FILTER_DESC_EXPORT(ms_video_capture_desc)

/* Webcam methods */
static void video_capture_detect(MSWebCamManager *obj);
static void video_capture_cam_init(MSWebCam *cam){
	ms_message("Android VIDEO capture filter cam init");
}

static MSFilter *video_capture_create_reader(MSWebCam *obj){
	ms_message("Instanciating Android VIDEO capture MS filter");

	MSFilter* lFilter = ms_filter_new_from_desc(&ms_video_capture_desc);
	getContext(lFilter)->webcam = obj;
	
	return lFilter;
}

MSWebCamDesc ms_android_video_capture_desc={
		"AndroidVideoCapture",
		&video_capture_detect,
		&video_capture_cam_init,
		&video_capture_create_reader,
		NULL
};

static void video_capture_detect(MSWebCamManager *obj){
	ms_message("Detecting Android VIDEO cards");
	JNIEnv *env = ms_get_jni_env();
	jclass helperClass = getHelperClassGlobalRef(env);
	
	if (helperClass==NULL) return;

	// create 3 int arrays - assuming 2 webcams at most
	jintArray indexes = (jintArray)env->NewIntArray(2);
	jintArray frontFacing = (jintArray)env->NewIntArray(2);
	jintArray orientation = (jintArray)env->NewIntArray(2);

	jmethodID method = env->GetStaticMethodID(helperClass,"detectCameras", "([I[I[I)I");

	int count = env->CallStaticIntMethod(helperClass, method, indexes, frontFacing, orientation);

	ms_message("%d cards detected", count);
	for(int i=0; i<count; i++) {
		MSWebCam *cam = ms_web_cam_new(&ms_android_video_capture_desc);
		AndroidWebcamConfig* c = new AndroidWebcamConfig();
		env->GetIntArrayRegion(indexes, i, 1, &c->id);
		env->GetIntArrayRegion(frontFacing, i, 1, &c->frontFacing);
		env->GetIntArrayRegion(orientation, i, 1, &c->orientation);
		cam->data = c;
		cam->name = ms_strdup("Android video name");
		char* idstring = (char*) ms_malloc(15);
		snprintf(idstring, 15, "Android%d", c->id);
		cam->id = idstring;
		ms_web_cam_manager_add_cam(obj,cam);
		ms_message("camera created: id=%d frontFacing=%d orientation=%d [msid:%s]\n", c->id, c->frontFacing, c->orientation, idstring);
	}
	env->DeleteLocalRef(indexes);
	env->DeleteLocalRef(frontFacing);
	env->DeleteLocalRef(orientation);

	env->DeleteGlobalRef(helperClass);
	ms_message("Detection of Android VIDEO cards done");
}

/************************ JNI methods                  ************************/
#ifdef __cplusplus
extern "C" {
#endif

JNIEXPORT void JNICALL Java_org_linphone_mediastream_video_capture_AndroidVideoApi5JniWrapper_putImage(JNIEnv*  env,
		jclass  thiz,jlong nativePtr,jbyteArray frame) {
	AndroidReaderContext* d = (AndroidReaderContext*) nativePtr;
	
	ms_mutex_lock(&d->mutex);
	
	if (!d->androidCamera){
		ms_mutex_unlock(&d->mutex);
		return;
	}

	if (!ms_video_capture_new_frame(&d->fpsControl,d->filter->ticker->time)) {
		ms_mutex_unlock(&d->mutex);
		return;
	}

	if (d->rotation != UNDEFINED_ROTATION && d->rotationSavedDuringVSize != d->rotation) {
		ms_warning("Rotation has changed (new value: %d) since vsize was run (old value: %d)."
					"Will produce inverted images. Use set_device_orientation() then update call.\n",
			d->rotation, d->rotationSavedDuringVSize);
	}

	int image_rotation_correction = compute_image_rotation_correction(d, d->rotationSavedDuringVSize);

	jboolean isCopied;
	jbyte* jinternal_buff = env->GetByteArrayElements(frame, &isCopied);
	if (isCopied) {
		ms_warning("The video frame received from Java has been copied");
	}

	int y_cropping_offset=0, cbcr_cropping_offset=0;
	MSVideoSize targetSize;
	targetSize.width = d->useDownscaling ? d->requestedSize.width * 2 : d->requestedSize.width;
	targetSize.height = d->useDownscaling ? d->requestedSize.height * 2 : d->requestedSize.height;

	compute_cropping_offsets(d->hwCapableSize, targetSize, &y_cropping_offset, &cbcr_cropping_offset);

	int width = d->hwCapableSize.width;
	int height = d->hwCapableSize.height;
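	/* The Java wrapper hands over the raw preview buffer in Android's default semi-planar layout
	   (typically NV21: a full-resolution Y plane of width*height bytes followed by an interleaved,
	   subsampled chroma plane). The two cropping offsets computed above index into each of those
	   planes so that the copy below extracts a centered region of the capture. */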

	uint8_t* y_src = (uint8_t*)(jinternal_buff + y_cropping_offset);
	uint8_t* cbcr_src = (uint8_t*) (jinternal_buff + width * height + cbcr_cropping_offset);

	/* Warning note: image_rotation_correction == 90 does not imply portrait mode!
	   (incorrect function naming).
	   It only implies one thing: the image needs to be rotated by that amount to be correctly
	   displayed.
	*/
 	mblk_t* yuv_block = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->allocator, y_src
														, cbcr_src
														, image_rotation_correction
														, d->usedSize.width
														, d->usedSize.height
														, d->hwCapableSize.width
														, d->hwCapableSize.width
														, false
														, d->useDownscaling);
	if (yuv_block) {
		if (d->frame)
			freemsg(d->frame);
		d->frame = yuv_block;
	}
	ms_mutex_unlock(&d->mutex);

	// JNI_ABORT frees the buffer without copying back the possible changes
	env->ReleaseByteArrayElements(frame, jinternal_buff, JNI_ABORT);
}

#ifdef __cplusplus
}
#endif

static int compute_image_rotation_correction(AndroidReaderContext* d, int rotation) {
	AndroidWebcamConfig* conf = (AndroidWebcamConfig*)d->webcam->data;

	int result;
	if (conf->frontFacing) {
		ms_debug("%s: %d + %d\n", __FUNCTION__, ((AndroidWebcamConfig*)d->webcam->data)->orientation, rotation);
	 	result = ((AndroidWebcamConfig*)d->webcam->data)->orientation + rotation;
	} else {
		ms_debug("%s: %d - %d\n", __FUNCTION__, ((AndroidWebcamConfig*)d->webcam->data)->orientation, rotation);
	 	result = ((AndroidWebcamConfig*)d->webcam->data)->orientation - rotation;
	}
	while(result < 0)
		result += 360;
	return result % 360;
}
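/* Illustrative values for the correction above:
   - back camera mounted at orientation 90, device rotated by 90:   90 - 90  = 0   (no correction);
   - front camera mounted at orientation 270, device rotated by 90: 270 + 90 = 360 -> 0;
   - front camera mounted at orientation 270, device not rotated:   270 + 0  = 270. */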

static void compute_cropping_offsets(MSVideoSize hwSize, MSVideoSize outputSize, int* yoff, int* cbcroff) {
	// if hw <= out -> return
	if (hwSize.width * hwSize.height <= outputSize.width * outputSize.height) {
		*yoff = 0;
		*cbcroff = 0;
		return;
	}

	int halfDiffW = (hwSize.width - ((outputSize.width>outputSize.height)?outputSize.width:outputSize.height)) / 2;
	int halfDiffH = (hwSize.height - ((outputSize.width<outputSize.height)?outputSize.width:outputSize.height)) / 2;

	*yoff = hwSize.width * halfDiffH + halfDiffW;
	*cbcroff = hwSize.width * halfDiffH * 0.5 + halfDiffW;
}
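/* Worked example for the offsets above (illustrative sizes): hwSize=640x480, outputSize=352x288:
   halfDiffW = (640 - 352) / 2 = 144, halfDiffH = (480 - 288) / 2 = 96,
   *yoff    = 640 * 96 + 144       = 61584 (bytes into the Y plane),
   *cbcroff = 640 * 96 * 0.5 + 144 = 30864 (bytes into the interleaved CbCr plane). */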

static jclass getHelperClassGlobalRef(JNIEnv *env) {
	ms_message("getHelperClassGlobalRef (env: %p)", env);
	const char* className;
	// FindClass only returns local references.
	
	// Find the current Android SDK version
	jclass version = env->FindClass(VersionPath);
	jmethodID method = env->GetStaticMethodID(version,"sdk", "()I");
	android_sdk_version = env->CallStaticIntMethod(version, method);
	ms_message("Android SDK version found is %i", android_sdk_version);
	env->DeleteLocalRef(version);

	if (android_sdk_version >= 9) {
		className = AndroidApi9WrapperPath;
	} else if (android_sdk_version >= 8) {
		className = AndroidApi8WrapperPath;
	} else {
		className = AndroidApi5WrapperPath;
	}
	jclass c = env->FindClass(className);
	if (c == 0) {
		ms_error("Could not load class '%s' (%d)", className, android_sdk_version);
		return NULL;
	} else {
		jclass globalRef = reinterpret_cast<jclass>(env->NewGlobalRef(c));
		env->DeleteLocalRef(c);
		return globalRef;
	}
}

static AndroidReaderContext *getContext(MSFilter *f) {
	return (AndroidReaderContext*) f->data;
}