/*
mediastreamer2 library - modular sound and video processing and streaming

This is the video capture filter for Android.
It uses one of the JNI wrappers to access the Android video capture API.
See:
	org.linphone.mediastream.video.capture.AndroidVideoApi9JniWrapper
	org.linphone.mediastream.video.capture.AndroidVideoApi8JniWrapper
	org.linphone.mediastream.video.capture.AndroidVideoApi5JniWrapper

 * Copyright (C) 2010  Belledonne Communications, Grenoble, France

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 */

#include "mediastreamer2/msvideo.h"
#include "mediastreamer2/msfilter.h"
#include "mediastreamer2/mswebcam.h"
#include "mediastreamer2/msjava.h"
#include "mediastreamer2/msticker.h"

#include <jni.h>
#include <math.h>

static int android_sdk_version = 5;

static const char* AndroidApi9WrapperPath = "org/linphone/mediastream/video/capture/AndroidVideoApi9JniWrapper";
static const char* AndroidApi8WrapperPath = "org/linphone/mediastream/video/capture/AndroidVideoApi8JniWrapper";
static const char* AndroidApi5WrapperPath = "org/linphone/mediastream/video/capture/AndroidVideoApi5JniWrapper";
static const char* VersionPath = "org/linphone/mediastream/Version";

#define UNDEFINED_ROTATION -1

/************************ Data structures              ************************/
// Struct holding an Android camera's properties
struct AndroidWebcamConfig {
	int id;
	int frontFacing;
	int orientation;
};

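// Capture filter state, shared between the MS2 ticker thread and the Java capture
// callback thread (putImage), hence the mutex.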
struct AndroidReaderContext {
	AndroidReaderContext(MSFilter *f, MSWebCam *cam):filter(f), webcam(cam),frame(0),fps(5){
		ms_message("Creating AndroidReaderContext for Android VIDEO capture filter");
		ms_mutex_init(&mutex,NULL);
		androidCamera = 0;
		previewWindow = 0;
		rotation = rotationSavedDuringVSize = UNDEFINED_ROTATION;
		allocator = ms_yuv_buf_allocator_new();
		snprintf(fps_context, sizeof(fps_context), "Captured mean fps=%%f");
	};

	~AndroidReaderContext(){
		if (frame != 0) {
			freeb(frame);
		}
		ms_yuv_buf_allocator_free(allocator);
		ms_mutex_destroy(&mutex);
	};

	MSFrameRateController fpsControl;
	MSAverageFPS averageFps;

	MSFilter *filter;
	MSWebCam *webcam;

	mblk_t *frame;
	float fps;
	MSVideoSize requestedSize, hwCapableSize, usedSize;
	ms_mutex_t mutex;
	int rotation, rotationSavedDuringVSize;
	int useDownscaling;
	char fps_context[64];
	MSYuvBufAllocator *allocator;

	jobject androidCamera;
	jobject previewWindow;
	jclass helperClass;
};

/************************ Private helper methods       ************************/
static jclass getHelperClassGlobalRef(JNIEnv *env);
static int compute_image_rotation_correction(AndroidReaderContext* d, int rotation);
static void compute_cropping_offsets(MSVideoSize hwSize, MSVideoSize outputSize, int* yoff, int* cbcroff);
static AndroidReaderContext *getContext(MSFilter *f);


/************************ MS2 filter methods           ************************/
static int video_capture_set_fps(MSFilter *f, void *arg){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	d->fps=*((float*)arg);
	snprintf(d->fps_context, sizeof(d->fps_context), "Captured mean fps=%%f, expected=%f", d->fps);
	ms_video_init_framerate_controller(&d->fpsControl, d->fps);
	ms_video_init_average_fps(&d->averageFps, d->fps_context);
	return 0;
}

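// Ask the Java helper to trigger autofocus on the current camera.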
static int video_capture_set_autofocus(MSFilter *f, void* data){
	JNIEnv *env = ms_get_jni_env();
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	jmethodID method = env->GetStaticMethodID(d->helperClass,"activateAutoFocus", "(Ljava/lang/Object;)V");
	// the Java method returns void, so use CallStaticVoidMethod
	env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera);

	return 0;
}

static int video_capture_get_fps(MSFilter *f, void *arg){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	*((float*)arg) = ms_average_fps_get(&d->averageFps);
	return 0;
}

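// Negotiate the capture resolution: ask the Java helper for the nearest size the
// camera supports, derive the size actually used (with optional cropping or
// downscaling), and account for the device orientation known at this point.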
static int video_capture_set_vsize(MSFilter *f, void* data){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	ms_mutex_lock(&d->mutex);

	d->requestedSize=*(MSVideoSize*)data;

	// always request landscape mode, orientation is handled later
	if (d->requestedSize.height > d->requestedSize.width) {
		int tmp = d->requestedSize.height;
		d->requestedSize.height = d->requestedSize.width;
		d->requestedSize.width = tmp;
	}

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"selectNearestResolutionAvailable", "(III)[I");

	// find the nearest hw-available resolution (using a JNI call)
	jobject resArray = env->CallStaticObjectMethod(d->helperClass, method, ((AndroidWebcamConfig*)d->webcam->data)->id, d->requestedSize.width, d->requestedSize.height);

	if (!resArray) {
		ms_mutex_unlock(&d->mutex);
		ms_error("Failed to retrieve camera '%d' supported resolutions\n", ((AndroidWebcamConfig*)d->webcam->data)->id);
		return -1;
	}

	// handle result:
	//   - 0 : width
	//   - 1 : height
	//   - 2 : useDownscaling
	jint res[3];
	env->GetIntArrayRegion((jintArray)resArray, 0, 3, res);
	ms_message("Camera selected resolution is: %dx%d (requested: %dx%d) with downscaling?%d\n", res[0], res[1], d->requestedSize.width, d->requestedSize.height, res[2]);
	d->hwCapableSize.width = res[0];
	d->hwCapableSize.height = res[1];
	d->useDownscaling = res[2];

	int rqSize = d->requestedSize.width * d->requestedSize.height;
	int hwSize = d->hwCapableSize.width * d->hwCapableSize.height;
	double downscale = d->useDownscaling ? 0.5 : 1;

	// if hw supplies a smaller resolution, modify requested size accordingly
	if ((hwSize * downscale * downscale) < rqSize) {
		ms_message("Camera cannot produce requested resolution %dx%d, will supply a smaller one: %dx%d\n",
			d->requestedSize.width, d->requestedSize.height, (int) (res[0] * downscale), (int) (res[1]*downscale));
		d->usedSize.width = (int) (d->hwCapableSize.width * downscale);
		d->usedSize.height = (int) (d->hwCapableSize.height * downscale);
	} else if ((hwSize * downscale * downscale) > rqSize) {
		ms_message("Camera cannot produce requested resolution %dx%d, will capture a bigger one (%dx%d) and crop it to match encoder requested resolution\n",
			d->requestedSize.width, d->requestedSize.height, (int)(res[0] * downscale), (int)(res[1] * downscale));
		d->usedSize.width = d->requestedSize.width;
		d->usedSize.height = d->requestedSize.height;
	} else {
		d->usedSize.width = d->requestedSize.width;
		d->usedSize.height = d->requestedSize.height;
	}

	// is the phone held perpendicular to the camera orientation?
	if (d->rotation == UNDEFINED_ROTATION || compute_image_rotation_correction(d, d->rotation) % 180 != 0) {
		if (d->rotation == UNDEFINED_ROTATION) {
			ms_error("To produce a correct image, Mediastreamer MUST be aware of the device's orientation BEFORE calling 'configure_video_source'\n");
			ms_warning("Capture filter does not know yet about the device's orientation.\n"
				"Current assumption: device is held perpendicular to its webcam (ie: portrait mode for a phone)\n");
			d->rotationSavedDuringVSize = 0;
		} else {
			d->rotationSavedDuringVSize = d->rotation;
		}
		bool camIsLandscape = d->hwCapableSize.width > d->hwCapableSize.height;
		bool useIsLandscape = d->usedSize.width > d->usedSize.height;

		// if both are landscape or both portrait, swap
		if (camIsLandscape == useIsLandscape) {
			int t = d->usedSize.width;
			d->usedSize.width = d->usedSize.height;
			d->usedSize.height = t;
			ms_message("Swapped resolution width and height to: %dx%d\n", d->usedSize.width, d->usedSize.height);
		}
	} else {
		d->rotationSavedDuringVSize = d->rotation;
	}

	ms_mutex_unlock(&d->mutex);
	return 0;
}

static int video_capture_get_vsize(MSFilter *f, void* data){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	*(MSVideoSize*)data=d->usedSize;
	return 0;
}

static int video_capture_get_pix_fmt(MSFilter *f, void *data){
	*(MSPixFmt*)data=MS_YUV420P;
	return 0;
}

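// The preview surface is owned by the Java layer. If the camera is already running
// and the surface changes, recording is restarted so the new surface gets used.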
// Java will give us a pointer to capture preview surface.
static int video_set_native_preview_window(MSFilter *f, void *arg) {
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;

	ms_mutex_lock(&d->mutex);

	jobject w = (jobject)*((unsigned long*)arg);

	if (w == d->previewWindow) {
		ms_mutex_unlock(&d->mutex);
		return 0;
	}

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"setPreviewDisplaySurface", "(Ljava/lang/Object;Ljava/lang/Object;)V");

	if (d->androidCamera) {
		if (d->previewWindow == 0) {
			ms_message("Preview capture window set for the 1st time (win: %p rotation:%d)\n", w, d->rotation);
		} else {
			ms_message("Preview capture window changed (oldwin: %p newwin: %p rotation:%d)\n", d->previewWindow, w, d->rotation);

			env->CallStaticVoidMethod(d->helperClass,
						env->GetStaticMethodID(d->helperClass,"stopRecording", "(Ljava/lang/Object;)V"),
						d->androidCamera);
			env->DeleteGlobalRef(d->androidCamera);
			d->androidCamera = env->NewGlobalRef(
				env->CallStaticObjectMethod(d->helperClass,
						env->GetStaticMethodID(d->helperClass,"startRecording", "(IIIIIJ)Ljava/lang/Object;"),
						((AndroidWebcamConfig*)d->webcam->data)->id,
						d->hwCapableSize.width,
						d->hwCapableSize.height,
						(jint)30,
						(d->rotation != UNDEFINED_ROTATION) ? d->rotation:0,
						(jlong)d));
		}
		// if previewWindow AND camera are valid => set preview window
		if (w && d->androidCamera)
			env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera, w);
	} else {
		ms_message("Preview capture window set but camera not created yet; remembering it for later use\n");
	}
	d->previewWindow = w;

	ms_mutex_unlock(&d->mutex);
	return 0;
}

static int video_get_native_preview_window(MSFilter *f, void *arg) {
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	*((unsigned long *)arg) = (unsigned long)d->previewWindow;
	return 0;
}

static int video_set_device_rotation(MSFilter* f, void* arg) {
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	d->rotation=*((int*)arg);
	ms_message("%s : %d\n", __FUNCTION__, d->rotation);
	return 0;
}

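// Called when the graph starts: start the Java-side recording with the negotiated
// size and rotation, and attach the preview surface if one was already provided.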
void video_capture_preprocess(MSFilter *f){
	ms_message("Preprocessing of Android VIDEO capture filter");

	AndroidReaderContext *d = getContext(f);
	ms_mutex_lock(&d->mutex);

	ms_video_init_framerate_controller(&d->fpsControl, d->fps);
	ms_video_init_average_fps(&d->averageFps, d->fps_context);

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"startRecording", "(IIIIIJ)Ljava/lang/Object;");

	ms_message("Starting Android camera '%d' (rotation:%d)", ((AndroidWebcamConfig*)d->webcam->data)->id, d->rotation);
	jobject cam = env->CallStaticObjectMethod(d->helperClass, method,
			((AndroidWebcamConfig*)d->webcam->data)->id,
			d->hwCapableSize.width,
			d->hwCapableSize.height,
			(jint)30,
			d->rotationSavedDuringVSize,
			(jlong)d);
	d->androidCamera = env->NewGlobalRef(cam);

	if (d->previewWindow) {
		method = env->GetStaticMethodID(d->helperClass,"setPreviewDisplaySurface", "(Ljava/lang/Object;Ljava/lang/Object;)V");
		env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera, d->previewWindow);
	}
	ms_message("Preprocessing of Android VIDEO capture filter done");
	ms_mutex_unlock(&d->mutex);
}

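// Called by the MS2 ticker: push the last frame delivered by Java (if any) to the
// filter output queue.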
static void video_capture_process(MSFilter *f){
	AndroidReaderContext* d = getContext(f);

	ms_mutex_lock(&d->mutex);

	// If frame not ready, return
	if (d->frame == 0) {
		ms_mutex_unlock(&d->mutex);
		return;
	}

	ms_video_update_average_fps(&d->averageFps, f->ticker->time);

	ms_queue_put(f->outputs[0],d->frame);
	d->frame = 0;
	ms_mutex_unlock(&d->mutex);
}

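// Called when the graph stops: stop the Java-side recording and drop the camera,
// preview window and pending frame references.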
static void video_capture_postprocess(MSFilter *f){
	ms_message("Postprocessing of Android VIDEO capture filter");
	AndroidReaderContext* d = getContext(f);
	JNIEnv *env = ms_get_jni_env();

	ms_mutex_lock(&d->mutex);

	if (d->androidCamera) {
		jmethodID method = env->GetStaticMethodID(d->helperClass,"stopRecording", "(Ljava/lang/Object;)V");

		env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera);
		env->DeleteGlobalRef(d->androidCamera);
	}
	d->androidCamera = 0;
	d->previewWindow = 0;
	if (d->frame){
		freemsg(d->frame);
		d->frame=NULL;
	}
	ms_mutex_unlock(&d->mutex);
}

static void video_capture_init(MSFilter *f) {
	AndroidReaderContext* d = new AndroidReaderContext(f, 0);
	ms_message("Init of Android VIDEO capture filter (%p)", d);
	JNIEnv *env = ms_get_jni_env();
	d->helperClass = getHelperClassGlobalRef(env);
	f->data = d;
}

static void video_capture_uninit(MSFilter *f) {
	ms_message("Uninit of Android VIDEO capture filter");
	AndroidReaderContext* d = getContext(f);
	JNIEnv *env = ms_get_jni_env();
	env->DeleteGlobalRef(d->helperClass);
	delete d;
}

static MSFilterMethod video_capture_methods[]={
		{	MS_FILTER_SET_FPS,	&video_capture_set_fps},
		{	MS_FILTER_GET_FPS,	&video_capture_get_fps},
		{	MS_FILTER_SET_VIDEO_SIZE, &video_capture_set_vsize},
		{	MS_FILTER_GET_VIDEO_SIZE, &video_capture_get_vsize},
		{	MS_FILTER_GET_PIX_FMT, &video_capture_get_pix_fmt},
		{	MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID, &video_set_native_preview_window }, // preview is managed by capture filter
		{	MS_VIDEO_DISPLAY_GET_NATIVE_WINDOW_ID, &video_get_native_preview_window },
		{	MS_VIDEO_CAPTURE_SET_DEVICE_ORIENTATION, &video_set_device_rotation },
		{	MS_VIDEO_CAPTURE_SET_AUTOFOCUS, &video_capture_set_autofocus },
		{	0,0 }
};

MSFilterDesc ms_video_capture_desc={
		MS_ANDROID_VIDEO_READ_ID,
		"MSAndroidVideoCapture",
		N_("A filter that captures Android video."),
		MS_FILTER_OTHER,
		NULL,
		0,
		1,
		video_capture_init,
		video_capture_preprocess,
		video_capture_process,
		video_capture_postprocess,
		video_capture_uninit,
		video_capture_methods
};

MS_FILTER_DESC_EXPORT(ms_video_capture_desc)

/* Webcam methods */
static void video_capture_detect(MSWebCamManager *obj);
static void video_capture_cam_init(MSWebCam *cam){
	ms_message("Android VIDEO capture filter cam init");
}

static MSFilter *video_capture_create_reader(MSWebCam *obj){
	ms_message("Instantiating Android VIDEO capture MS filter");

	MSFilter* lFilter = ms_factory_create_filter_from_desc(ms_web_cam_get_factory(obj), &ms_video_capture_desc);
	getContext(lFilter)->webcam = obj;

	return lFilter;
}

MSWebCamDesc ms_android_video_capture_desc={
		"AndroidVideoCapture",
		&video_capture_detect,
		&video_capture_cam_init,
		&video_capture_create_reader,
		NULL
};

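// Enumerate the cameras through the Java helper and register one MSWebCam per
// detected camera, storing its id, facing and sensor orientation.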
static void video_capture_detect(MSWebCamManager *obj){
	ms_message("Detecting Android VIDEO cards");
	JNIEnv *env = ms_get_jni_env();
	jclass helperClass = getHelperClassGlobalRef(env);

	if (helperClass == NULL) return;

	jmethodID countMethod = env->GetStaticMethodID(helperClass,"detectCamerasCount", "()I");
	int count = env->CallStaticIntMethod(helperClass, countMethod);

	jintArray indexes = (jintArray)env->NewIntArray(count);
	jintArray frontFacing = (jintArray)env->NewIntArray(count);
	jintArray orientation = (jintArray)env->NewIntArray(count);

	jmethodID method = env->GetStaticMethodID(helperClass,"detectCameras", "([I[I[I)I");
	env->CallStaticIntMethod(helperClass, method, indexes, frontFacing, orientation);

	ms_message("%d cards detected", count);
	for(int i=0; i<count; i++) {
		MSWebCam *cam = ms_web_cam_new(&ms_android_video_capture_desc);
		AndroidWebcamConfig* c = new AndroidWebcamConfig();
		env->GetIntArrayRegion(indexes, i, 1, &c->id);
		env->GetIntArrayRegion(frontFacing, i, 1, &c->frontFacing);
		env->GetIntArrayRegion(orientation, i, 1, &c->orientation);
		cam->data = c;
		cam->name = ms_strdup("Android video name");
		char* idstring = (char*) ms_malloc(15);
		snprintf(idstring, 15, "Android%d", c->id);
		cam->id = idstring;
		ms_web_cam_manager_add_cam(obj,cam);
		ms_message("camera created: id=%d frontFacing=%d orientation=%d [msid:%s]\n", c->id, c->frontFacing, c->orientation, idstring);
	}

	env->DeleteLocalRef(indexes);
	env->DeleteLocalRef(frontFacing);
	env->DeleteLocalRef(orientation);

	env->DeleteGlobalRef(helperClass);
	ms_message("Detection of Android VIDEO cards done");
}


/************************ JNI methods                  ************************/
#ifdef __cplusplus
extern "C" {
#endif

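// Called from Java for each captured preview frame. 'nativePtr' is the
// AndroidReaderContext passed to startRecording(); the buffer is a biplanar
// (semi-planar) YCbCr image that is cropped, rotated and converted to I420 here.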
JNIEXPORT void JNICALL Java_org_linphone_mediastream_video_capture_AndroidVideoApi5JniWrapper_putImage(JNIEnv*  env,
		jclass  thiz,jlong nativePtr,jbyteArray frame) {
	AndroidReaderContext* d = (AndroidReaderContext*) nativePtr;

	ms_mutex_lock(&d->mutex);

	if (!d->androidCamera){
		ms_mutex_unlock(&d->mutex);
		return;
	}

	if (!ms_video_capture_new_frame(&d->fpsControl,d->filter->ticker->time)) {
		ms_mutex_unlock(&d->mutex);
		return;
	}

	if (d->rotation != UNDEFINED_ROTATION && d->rotationSavedDuringVSize != d->rotation) {
		ms_warning("Rotation has changed (new value: %d) since vsize was run (old value: %d). "
					"Will produce inverted images. Use set_device_orientation() then update the call.\n",
			d->rotation, d->rotationSavedDuringVSize);
	}

	int image_rotation_correction = compute_image_rotation_correction(d, d->rotationSavedDuringVSize);

	jboolean isCopied;
	jbyte* jinternal_buff = env->GetByteArrayElements(frame, &isCopied);
	if (isCopied) {
		ms_warning("The video frame received from Java has been copied");
	}

	int y_cropping_offset=0, cbcr_cropping_offset=0;
	MSVideoSize targetSize;
	targetSize.width = d->useDownscaling ? d->requestedSize.width*2 : d->requestedSize.width;
	targetSize.height = d->useDownscaling ? d->requestedSize.height*2 : d->requestedSize.height;

	compute_cropping_offsets(d->hwCapableSize, targetSize, &y_cropping_offset, &cbcr_cropping_offset);

	int width = d->hwCapableSize.width;
	int height = d->hwCapableSize.height;

	uint8_t* y_src = (uint8_t*)(jinternal_buff + y_cropping_offset);
	uint8_t* cbcr_src = (uint8_t*) (jinternal_buff + width * height + cbcr_cropping_offset);

	/* Warning note: image_rotation_correction == 90 does not imply portrait mode!
	   (incorrect function naming).
	   It only implies one thing: the image needs to be rotated by that amount to be
	   correctly displayed.
	*/
	mblk_t* yuv_block = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->allocator, y_src
														, cbcr_src
														, image_rotation_correction
														, d->usedSize.width
														, d->usedSize.height
														, d->hwCapableSize.width
														, d->hwCapableSize.width
														, false
														, d->useDownscaling);
	if (yuv_block) {
		if (d->frame)
			freemsg(d->frame);
		d->frame = yuv_block;
	}
	ms_mutex_unlock(&d->mutex);

	// JNI_ABORT frees the buffer without copying back the possible changes
	env->ReleaseByteArrayElements(frame, jinternal_buff, JNI_ABORT);
}

#ifdef __cplusplus
}
#endif

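// Combine the camera sensor orientation with the device rotation to obtain the
// angle the captured image must be rotated by; the device rotation is added for
// front-facing cameras and subtracted for back-facing ones.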
static int compute_image_rotation_correction(AndroidReaderContext* d, int rotation) {
	AndroidWebcamConfig* conf = (AndroidWebcamConfig*)d->webcam->data;

	int result;
	if (conf->frontFacing) {
		ms_debug("%s: %d + %d\n", __FUNCTION__, conf->orientation, rotation);
		result = conf->orientation + rotation;
	} else {
		ms_debug("%s: %d - %d\n", __FUNCTION__, conf->orientation, rotation);
		result = conf->orientation - rotation;
	}
	while(result < 0)
		result += 360;
	return result % 360;
}

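// When the camera captures a larger image than requested, compute the offsets of
// the centered crop window into the Y plane and the interleaved CbCr plane.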
static void compute_cropping_offsets(MSVideoSize hwSize, MSVideoSize outputSize, int* yoff, int* cbcroff) {
	// if hw <= out -> return
	if (hwSize.width * hwSize.height <= outputSize.width * outputSize.height) {
		*yoff = 0;
		*cbcroff = 0;
		return;
	}

	int halfDiffW = (hwSize.width - ((outputSize.width>outputSize.height)?outputSize.width:outputSize.height)) / 2;
	int halfDiffH = (hwSize.height - ((outputSize.width<outputSize.height)?outputSize.width:outputSize.height)) / 2;

	*yoff = hwSize.width * halfDiffH + halfDiffW;
	*cbcroff = hwSize.width * halfDiffH * 0.5 + halfDiffW;
}

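// Pick the Java wrapper class matching the Android SDK version reported by
// org.linphone.mediastream.Version and return a global reference to it.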
static jclass getHelperClassGlobalRef(JNIEnv *env) {
	ms_message("getHelperClassGlobalRef (env: %p)", env);
	const char* className;
	// FindClass only returns local references.

	// Find the current Android SDK version
	jclass version = env->FindClass(VersionPath);
	jmethodID method = env->GetStaticMethodID(version,"sdk", "()I");
	android_sdk_version = env->CallStaticIntMethod(version, method);
	ms_message("Android SDK version found is %i", android_sdk_version);
	env->DeleteLocalRef(version);

	if (android_sdk_version >= 9) {
		className = AndroidApi9WrapperPath;
	} else if (android_sdk_version >= 8) {
		className = AndroidApi8WrapperPath;
	} else {
		className = AndroidApi5WrapperPath;
	}
	jclass c = env->FindClass(className);
	if (c == 0) {
		ms_error("Could not load class '%s' (%d)", className, android_sdk_version);
		return NULL;
	} else {
		jclass globalRef = reinterpret_cast<jclass>(env->NewGlobalRef(c));
		env->DeleteLocalRef(c);
		return globalRef;
	}
}

static AndroidReaderContext *getContext(MSFilter *f) {
	return (AndroidReaderContext*) f->data;
}