Commit 5ce88630 authored by Yann Diorcet's avatar Yann Diorcet

Reformat ios/osx code

Fix ios issue at stop
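
The stop fix: teardown in dealloc is now serialized with @synchronized(self) and detaches the sample-buffer delegate ([output setSampleBufferDelegate:nil queue:NULL]) before releasing the output, so the capture dispatch queue can no longer deliver a frame into a view that is being deallocated.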
parent e613ba00
@@ -42,14 +42,14 @@
- (id)init {
	return [super initWithSession:[[[AVCaptureSession alloc] init] autorelease]];
}
@end

@interface IOSCapture : UIView<AVCaptureVideoDataOutputSampleBufferDelegate> {
@private
	AVCaptureDeviceInput *input;
	AVCaptureVideoDataOutput *output;
	ms_mutex_t mutex;
	mblk_t *msframe;
@@ -83,60 +83,60 @@
@synthesize parentView;

- (id)init {
	self = [super init];
	if (self) {
		[self initIOSCapture];
	}
	return self;
}
- (id)initWithCoder:(NSCoder *)coder {
	self = [super initWithCoder:coder];
	if (self) {
		[self initIOSCapture];
	}
	return self;
}
- (id)initWithFrame:(CGRect)frame {
	self = [super initWithFrame:frame];
	if (self) {
		[self initIOSCapture];
	}
	return self;
}
- (void)initIOSCapture {
	msframe = NULL;
	ms_mutex_init(&mutex, NULL);
	output = [[AVCaptureVideoDataOutput alloc] init];
	[self setOpaque:YES];
	[self setAutoresizingMask:UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight];

	/*
	 Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey. Supported pixel formats are
	 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange and
	 kCVPixelFormatType_32BGRA, except on iPhone 3G, where the supported pixel formats are
	 kCVPixelFormatType_422YpCbCr8 and kCVPixelFormatType_32BGRA.
	 */
	NSDictionary *dic = [NSDictionary dictionaryWithObjectsAndKeys:
		[NSNumber numberWithInteger:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], (id)kCVPixelBufferPixelFormatTypeKey, nil];
	[output setVideoSettings:dic];
	//output.minFrameDuration = CMTimeMake(1, 12);
	dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
	[output setSampleBufferDelegate:self queue:queue];
	dispatch_release(queue);

	AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
	[previewLayer setOrientation:AVCaptureVideoOrientationPortrait];
	[previewLayer setBackgroundColor:[[UIColor clearColor] CGColor]];
	[previewLayer setOpaque:YES];
	start_time = 0;
	frame_count = -1;
	fps = 0;
}
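
/*
 * Two notes on the setup above:
 * - kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange is NV12: plane 0 holds Y, plane 1
 *   holds interleaved CbCr, which the filter must deinterleave into the MS_YUV420P
 *   frames it advertises downstream (the conversion body is elided from this diff).
 * - AVCaptureVideoDataOutput retains the queue passed to -setSampleBufferDelegate:queue:,
 *   so the immediate dispatch_release() is safe.
 */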
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
		fromConnection:(AVCaptureConnection *)connection {
	CVImageBufferRef frame = nil;
	@synchronized(self) {
		@try {
			CVImageBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
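			/*
			 * Illustrative sketch only (the NV12 -> I420 conversion body is elided from
			 * this diff): plane access on a biplanar CVImageBuffer looks like
			 *
			 *   CVPixelBufferLockBaseAddress(frame, 0);
			 *   uint8_t *y  = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(frame, 0); // luma
			 *   uint8_t *uv = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(frame, 1); // interleaved CbCr
			 *   size_t  y_stride  = CVPixelBufferGetBytesPerRowOfPlane(frame, 0);
			 *   size_t  uv_stride = CVPixelBufferGetBytesPerRowOfPlane(frame, 1);
			 *   // ... copy Y, deinterleave CbCr into separate U and V planes ...
			 *   CVPixelBufferUnlockBaseAddress(frame, 0);
			 */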
@@ -191,8 +191,8 @@
		case 180:
			if (mOutputVideoSize.width*factor > plane_width || mOutputVideoSize.height*factor > plane_height) {
				ms_warning("[1]IOS capture discarding frame because wrong dimensions (%d > %d || %d > %d)",
					mOutputVideoSize.width*factor, plane_width,
					mOutputVideoSize.height*factor, plane_height);
				return;
			}
			break;
@@ -200,8 +200,8 @@
		case 270:
			if (mOutputVideoSize.width*factor > plane_height || mOutputVideoSize.height*factor > plane_width) {
				ms_warning("[2] IOS capture discarding frame because wrong dimensions (%d > %d || %d > %d)",
					mOutputVideoSize.width*factor, plane_height,
					mOutputVideoSize.height*factor, plane_width);
				return;
			}
			break;
@@ -235,7 +235,7 @@
	NSError *error = nil;
	unsigned int i = 0;
	AVCaptureDevice *device = NULL;

	NSArray *array = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
	for (i = 0; i < [array count]; i++) {
		AVCaptureDevice *currentDevice = [array objectAtIndex:i];
@@ -249,78 +249,81 @@
		device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
	}
	input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
	[input retain]; // keep reference on an externally allocated object

	AVCaptureSession *session = [(AVCaptureVideoPreviewLayer *)self.layer session];
	[session addInput:input];
	[session addOutput:output];
}
- (void)dealloc {
	@synchronized(self) {
		AVCaptureSession *session = [(AVCaptureVideoPreviewLayer *)self.layer session];
		[session removeInput:input];
		[session removeOutput:output];
		[output setSampleBufferDelegate:nil queue:NULL];
		[output release];
		[parentView release];
		if (msframe) {
			freemsg(msframe);
		}
	}
	ms_mutex_destroy(&mutex);
	[super dealloc];
}
+ (Class)layerClass {
	return [AVCaptureVideoPreviewLayerEx class];
}
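
// Returning AVCaptureVideoPreviewLayerEx from +layerClass makes UIKit create this view's
// backing layer as an AVCaptureVideoPreviewLayer, which is why self.layer is freely cast
// to that type throughout this file.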
- (int)start {
	NSAutoreleasePool* myPool = [[NSAutoreleasePool alloc] init];
	@synchronized(self) {
		AVCaptureSession *session = [(AVCaptureVideoPreviewLayer *)self.layer session];
		if (!session.running) {
			[session startRunning]; //warning can take around 1s before returning
			snprintf(fps_context, sizeof(fps_context), "Captured mean fps=%%f, expected=%f", fps);
			ms_video_init_average_fps(&averageFps, fps_context);
			ms_message("ioscapture video device started.");
		}
	}
	[myPool drain];
	return 0;
}
- (int)stop {
	NSAutoreleasePool* myPool = [[NSAutoreleasePool alloc] init];
	@synchronized(self) {
		AVCaptureSession *session = [(AVCaptureVideoPreviewLayer *)self.layer session];
		if (session.running) {
			[session stopRunning];
		}
	}
	[myPool drain];
	return 0;
}
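
// -start/-stop are never called on the filter thread: ioscapture_preprocess and
// ioscapture_uninit below dispatch them via performSelectorInBackground:, since
// -startRunning can block for about a second and must not stall the MSTicker.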
static AVCaptureVideoOrientation deviceOrientation2AVCaptureVideoOrientation(int deviceOrientation) {
	switch (deviceOrientation) {
		case 0: return AVCaptureVideoOrientationPortrait;
		case 90: return AVCaptureVideoOrientationLandscapeLeft;
		case -180:
		case 180: return AVCaptureVideoOrientationPortraitUpsideDown;
		case -90:
		case 270: return AVCaptureVideoOrientationLandscapeRight;
		default:
			ms_error("Unexpected device orientation [%i]: expected values are 0, 90, 180, 270", deviceOrientation);
			break;
	}
	return AVCaptureVideoOrientationPortrait;
}
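
/* Example mapping: 90 -> AVCaptureVideoOrientationLandscapeLeft, -90 or 270 ->
   AVCaptureVideoOrientationLandscapeRight; any other value logs an error and falls
   back to portrait. */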
- (void)setSize:(MSVideoSize)size {
	@synchronized(self) {
		AVCaptureSession *session = [(AVCaptureVideoPreviewLayer *)self.layer session];
		[session beginConfiguration];
		if (size.width*size.height == MS_VIDEO_SIZE_QVGA_W * MS_VIDEO_SIZE_QVGA_H) {
			[session setSessionPreset:AVCaptureSessionPreset640x480];
@@ -343,29 +346,29 @@ static AVCaptureVideoOrientation deviceOrientation2AVCaptureVideoOrientation(int
		mOutputVideoSize = mCameraVideoSize;
		mDownScalingRequired = false;
	}

	NSArray *connections = output.connections;
	if ([connections count] > 0 && [[connections objectAtIndex:0] isVideoOrientationSupported]) {
		switch (mDeviceOrientation) {
			case 0:
				[[connections objectAtIndex:0] setVideoOrientation:AVCaptureVideoOrientationPortrait];
				ms_message("Configuring camera in AVCaptureVideoOrientationPortrait mode");
				break;
			case 180:
				[[connections objectAtIndex:0] setVideoOrientation:AVCaptureVideoOrientationPortraitUpsideDown];
				ms_message("Configuring camera in AVCaptureVideoOrientationPortraitUpsideDown mode");
				break;
			case 90:
				[[connections objectAtIndex:0] setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
				ms_message("Configuring camera in AVCaptureVideoOrientationLandscapeLeft mode");
				break;
			case 270:
				[[connections objectAtIndex:0] setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
				ms_message("Configuring camera in AVCaptureVideoOrientationLandscapeRight mode");
			default:
				break;
		}
	}
	if (mDeviceOrientation == 0 || mDeviceOrientation == 180) {
@@ -385,7 +388,7 @@ static AVCaptureVideoOrientation deviceOrientation2AVCaptureVideoOrientation(int
- (void)setFps:(float)value {
	@synchronized(self) {
		AVCaptureSession *session = [(AVCaptureVideoPreviewLayer *)self.layer session];
		[session beginConfiguration];
		if ([[[UIDevice currentDevice] systemVersion] floatValue] < 5) {
			[output setMinFrameDuration:CMTimeMake(1, value)];
@@ -405,77 +408,77 @@ static AVCaptureVideoOrientation deviceOrientation2AVCaptureVideoOrientation(int
}
- (void)setParentView:(UIView*)aparentView {
	if (parentView == aparentView) {
		return;
	}
	if (parentView != nil) {
		[self removeFromSuperview];
		[parentView release];
		parentView = nil;
	}
	parentView = aparentView;
	if (parentView != nil) {
		[parentView retain];
		AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
		if ([parentView contentMode] == UIViewContentModeScaleAspectFit) {
			previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
		} else if ([parentView contentMode] == UIViewContentModeScaleAspectFill) {
			previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
		} else {
			previewLayer.videoGravity = AVLayerVideoGravityResize;
		}
		[parentView insertSubview:self atIndex:0];
		[self setFrame:[parentView bounds]];
	}
}
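
// The parent view's contentMode picks the preview layer's video gravity:
// ScaleAspectFit -> ResizeAspect (letterbox), ScaleAspectFill -> ResizeAspectFill (crop),
// anything else -> Resize (stretch to fill).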
//filter methods
static void ioscapture_init(MSFilter *f) {
	NSAutoreleasePool* myPool = [[NSAutoreleasePool alloc] init];
	f->data = [[IOSCapture alloc] initWithFrame:CGRectMake(0, 0, 0, 0)];
	[myPool drain];
}
static void ioscapture_uninit(MSFilter *f) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != nil) {
		NSAutoreleasePool* myPool = [[NSAutoreleasePool alloc] init];
		[thiz performSelectorInBackground:@selector(stop) withObject:nil];
		[thiz performSelectorOnMainThread:@selector(setParentView:) withObject:nil waitUntilDone:NO];
		[thiz release];
		[myPool drain];
	}
}
static void ioscapture_process(MSFilter *obj) {
	IOSCapture *thiz = (IOSCapture*)obj->data;
	if (thiz != NULL) {
		ms_mutex_lock(&thiz->mutex);
		if (thiz->msframe) {
			// keep only the latest image
			ms_queue_flush(obj->outputs[0]);
			ms_queue_put(obj->outputs[0], thiz->msframe);
			ms_video_update_average_fps(&thiz->averageFps, obj->ticker->time);
			thiz->msframe = 0;
		}
		ms_mutex_unlock(&thiz->mutex);
	}
}
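
// Design note: flushing outputs[0] before queuing the new frame drops anything the
// ticker has not consumed yet, bounding latency instead of letting frames pile up
// when the consumer is slower than the camera.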
static void ioscapture_preprocess(MSFilter *f) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		NSAutoreleasePool* myPool = [[NSAutoreleasePool alloc] init];
		[thiz performSelectorInBackground:@selector(start) withObject:nil];
		[myPool drain];
	}
}
static void ioscapture_postprocess(MSFilter *f) {
@@ -483,88 +486,88 @@ static void ioscapture_postprocess(MSFilter *f) {
static int ioscapture_get_fps(MSFilter *f, void *arg) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		*((float*)arg) = thiz->fps;
	}
	return 0;
}
static int ioscapture_set_fps(MSFilter *f, void *arg) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		[thiz setFps:*(float*)arg];
	}
	return 0;
}
static int ioscapture_get_pix_fmt(MSFilter *f, void *arg) {
	*(MSPixFmt*)arg = MS_YUV420P;
	return 0;
}
static int ioscapture_set_vsize(MSFilter *f, void *arg) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		[thiz setSize:*((MSVideoSize*)arg)];
	}
	return 0;
}
static int ioscapture_get_vsize(MSFilter *f, void *arg) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		*(MSVideoSize*)arg = *[thiz getSize];
	}
	return 0;
}
/*filter specific method*/
static int ioscapture_set_native_window(MSFilter *f, void *arg) {
	UIView* parentView = *(UIView**)arg;
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != nil) {
		// set current parent view
		[thiz performSelectorOnMainThread:@selector(setParentView:) withObject:parentView waitUntilDone:NO];
	}
	return 0;
}
static int ioscapture_get_native_window(MSFilter *f, void *arg) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		// FIXME: this assigns only the local parameter; the caller never receives thiz->parentView.
		arg = &thiz->parentView;
	}
	return 0;
}
static int ioscapture_set_device_orientation(MSFilter *f, void *arg) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		if (thiz->mDeviceOrientation != *(int*)(arg)) {
			thiz->mDeviceOrientation = *(int*)(arg);
			[thiz setSize:thiz->mOutputVideoSize]; //to update size from orientation
			// delete frame if any
			ms_mutex_lock(&thiz->mutex);
			if (thiz->msframe) {
				freemsg(thiz->msframe);
				thiz->msframe = 0;
			}
			ms_mutex_unlock(&thiz->mutex);
		}
	}
	return 0;
}
/* this method is used to display the preview with the correct orientation */
static int ioscapture_set_device_orientation_display(MSFilter *f, void *arg) {
	IOSCapture *thiz = (IOSCapture*)f->data;
	if (thiz != NULL) {
		AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)thiz.layer;
		if ([previewLayer isOrientationSupported])
			previewLayer.orientation = deviceOrientation2AVCaptureVideoOrientation(*(int*)(arg));
	}
	return 0;
}
@@ -575,9 +578,9 @@ static MSFilterMethod methods[] = {
	{ MS_FILTER_SET_VIDEO_SIZE,                ioscapture_set_vsize },
	{ MS_FILTER_GET_VIDEO_SIZE,                ioscapture_get_vsize },
	{ MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID,   ioscapture_set_native_window }, //preview is managed by capture filter
	{ MS_VIDEO_DISPLAY_GET_NATIVE_WINDOW_ID,   ioscapture_get_native_window },
	{ MS_VIDEO_CAPTURE_SET_DEVICE_ORIENTATION, ioscapture_set_device_orientation },
	{ MS_VIDEO_DISPLAY_SET_DEVICE_ORIENTATION, ioscapture_set_device_orientation_display },
	{ 0, NULL }
};
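
/*
 * Usage sketch (illustrative, not part of this commit): these entries are invoked
 * through mediastreamer2's generic filter-method call, e.g.
 *
 *   float fps = 15.0f;
 *   ms_filter_call_method(f, MS_FILTER_SET_FPS, &fps);
 *
 *   MSVideoSize vsize = { MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H };
 *   ms_filter_call_method(f, MS_FILTER_SET_VIDEO_SIZE, &vsize);
 */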
@@ -622,7 +625,7 @@ static void ms_v4ios_detect(MSWebCamManager *obj) {
ms_error("No capture support for IOS version below 4");
return<