Commit 7415e8e7 authored by Guillaume Beraudo's avatar Guillaume Beraudo

First version: non-planar, SDL 1.3 HG, FFMPEG GIT LGPL.

- Latest SDL 1.3 required (fix crashes on setVideoMode)
- Latest FFMPEG required (previous ones don't compile with SDL HG)
- Planar captured images under investigation.
parent c02b03da
......@@ -123,9 +123,8 @@ AC_DEFUN([MS_CHECK_VIDEO],[
if test "$macosx_found" = "yes" ; then
enable_sdl_default=true
enable_x11_default=false
CFLAGS="$CFLAGS -Dmain=SDL_main"
OBJCFLAGS="$OBJCFLAGS -framework QTKit "
LIBS="$LIBS -framework QTKit -framework CoreVideo -lSDLmain"
OBJCFLAGS="$OBJCFLAGS -framework QTKit"
LIBS="$LIBS -framework QTKit -framework CoreVideo -framework Foundation"
AC_LANG_PUSH([Objective C])
AC_CHECK_HEADERS([QTKit/QTKit.h],[],[AC_MSG_ERROR([QTKit framework not found, required for video support])])
AC_LANG_POP([Objective C])
......@@ -212,12 +211,6 @@ AC_DEFUN([MS_CHECK_VIDEO],[
if test "$mingw_found" = "yes" ; then
VIDEO_LIBS="$VIDEO_LIBS -lvfw32 -lgdi32"
fi
case $target_os in
*darwin*)
LIBS="$LIBS -framework QuickTime"
;;
esac
fi
AC_SUBST(VIDEO_CFLAGS)
......
......@@ -11,8 +11,30 @@
struct v4mState;
/* Map a CoreVideo/QuickTime pixel format code to the corresponding
 * mediastreamer2 MSPixFmt, optionally logging the matched format name.
 * Returns MS_PIX_FMT_UNKNOWN for any unrecognized code. */
static MSPixFmt ostype_to_pix_fmt(OSType pixelFormat, bool printFmtName){
	// ms_message("OSType= %i", pixelFormat);
	if (pixelFormat == kCVPixelFormatType_420YpCbCr8Planar) {
		if (printFmtName) ms_message("FORMAT = MS_YUV420P");
		return MS_YUV420P;
	}
	if (pixelFormat == kYUVSPixelFormat) {
		if (printFmtName) ms_message("FORMAT = MS_YUY2");
		return MS_YUY2;
	}
	if (pixelFormat == kUYVY422PixelFormat) {
		if (printFmtName) ms_message("FORMAT = MS_UYVY");
		return MS_UYVY;
	}
	if (pixelFormat == k32RGBAPixelFormat) {
		if (printFmtName) ms_message("FORMAT = MS_RGBA32");
		return MS_RGBA32;
	}
	if (printFmtName) ms_message("Format unknown: %i", (UInt32) pixelFormat);
	return MS_PIX_FMT_UNKNOWN;
}
@interface NsMsWebCam :NSObject
{
NSAutoreleasePool *globalPool;
QTCaptureDeviceInput *input;
QTCaptureDecompressedVideoOutput * output;
QTCaptureSession *session;
......@@ -36,28 +58,74 @@ struct v4mState;
@end
@implementation NsMsWebCam
/* QTCaptureDecompressedVideoOutput delegate callback: copies each captured
 * frame into an mblk_t and queues it on rq (consumed by the filter thread).
 * Planar frames are repacked plane by plane (dropping row padding); other
 * frames are queued as raw sample bytes. */
- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)frame withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{
	NSAutoreleasePool *myPool = [[NSAutoreleasePool alloc] init];
	ms_mutex_lock(&mutex);

	CVReturn status = CVPixelBufferLockBaseAddress(frame, 0);
	if (kCVReturnSuccess != status) {
		ms_error("Error locking base address: %i", status);
		// FIX: the original early return left `mutex` locked (deadlocking the
		// consumer) and never drained the autorelease pool.
		ms_mutex_unlock(&mutex);
		[myPool drain];
		return;
	}

	if (CVPixelBufferIsPlanar(frame)) {
		// Planar image (e.g. YUV420P): copy each plane row by row so that any
		// bytes-per-row padding is stripped from the destination buffer.
		size_t numberOfPlanes = CVPixelBufferGetPlaneCount(frame);
		MSPicture pict;
		size_t w = CVPixelBufferGetWidth(frame);
		size_t h = CVPixelBufferGetHeight(frame);
		mblk_t *yuv_block = ms_yuv_buf_alloc(&pict, w, h);
		int p;
		for (p = 0; p < numberOfPlanes; p++) {
			size_t fullrow_width = CVPixelBufferGetBytesPerRowOfPlane(frame, p);
			size_t plane_width = CVPixelBufferGetWidthOfPlane(frame, p);
			size_t plane_height = CVPixelBufferGetHeightOfPlane(frame, p);
			uint8_t *dst_plane = pict.planes[p];
			uint8_t *src_plane = CVPixelBufferGetBaseAddressOfPlane(frame, p);
			// FIX: cast size_t arguments to int — passing size_t for %i is
			// undefined behavior on LP64 platforms.
			ms_message("CVPixelBuffer %ix%i; Plane %i %ix%i (%i)", (int)w, (int)h, p,
			           (int)plane_width, (int)plane_height, (int)fullrow_width);
			int l;
			for (l = 0; l < plane_height; l++) {
				memcpy(dst_plane, src_plane, plane_width);
				src_plane += fullrow_width;
				dst_plane += plane_width;
			}
		}
		putq(&rq, yuv_block);
	} else {
		// Buffer doesn't contain a planar image: queue the raw sample bytes as-is.
		uint8_t *data = (uint8_t *)[sampleBuffer bytesForAllSamples];
		int size = [sampleBuffer lengthForAllSamples];
		mblk_t *buf = allocb(size, 0);
		memcpy(buf->b_wptr, data, size);
		buf->b_wptr += size;
		putq(&rq, buf);
	}

	CVPixelBufferUnlockBaseAddress(frame, 0);
	ms_mutex_unlock(&mutex);
	[myPool drain];
}
-(id) init {
qinit(&rq);
ms_mutex_init(&mutex,NULL);
globalPool = [[NSAutoreleasePool alloc] init];
session = [[QTCaptureSession alloc] init];
output = [[QTCaptureDecompressedVideoOutput alloc] init];
[output automaticallyDropsLateVideoFrames];
[output setDelegate: self];
......@@ -86,6 +154,8 @@ struct v4mState;
}
flushq(&rq,0);
[globalPool drain];
ms_mutex_destroy(&mutex);
[super dealloc];
......@@ -108,44 +178,22 @@ struct v4mState;
/* Query the capture device's native video pixel format and map it to a
 * mediastreamer2 MSPixFmt via ostype_to_pix_fmt(). Falls back to MS_YUV420P
 * when the device is closed or no supported format is found.
 * NOTE(review): this span was garbled by the diff rendering (old and new
 * branches interleaved); reconstructed from the visible post-commit lines. */
-(int) getPixFmt{
	QTCaptureDevice *device = [input device];
	if ([device isOpen]) {
		NSArray *array = [device formatDescriptions];
		NSEnumerator *enumerator = [array objectEnumerator];
		QTFormatDescription *desc;
		while ((desc = [enumerator nextObject])) {
			if ([desc mediaType] == QTMediaTypeVideo) {
				UInt32 fmt = [desc formatType];
				// Log the matched format (second argument enables printing).
				MSPixFmt format = ostype_to_pix_fmt(fmt, true);
				if (format != MS_PIX_FMT_UNKNOWN) return format;
			}
		}
	} else {
		ms_warning("The camera wasn't opened");
	}
	ms_warning("No format found, using MS_YUV420P pixel format");
	return MS_YUV420P;
}
......@@ -155,7 +203,7 @@ struct v4mState;
unsigned int i = 0;
QTCaptureDevice * device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
if(name != nil)
{
NSArray * array = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
......@@ -163,7 +211,7 @@ struct v4mState;
for(i = 0 ; i < [array count]; i++)
{
QTCaptureDevice * deviceTmp = [array objectAtIndex:i];
if(!strcmp([[device localizedDisplayName] UTF8String], name))
if(!strcmp([[deviceTmp localizedDisplayName] UTF8String], name))
{
device = deviceTmp;
break;
......@@ -177,7 +225,7 @@ struct v4mState;
ms_error("%s", [[error localizedDescription] UTF8String]);
return;
}
input = [[QTCaptureDeviceInput alloc] initWithDevice:device];
success = [session addInput:input error:&error];
......@@ -196,7 +244,7 @@ struct v4mState;
NSDictionary * dic = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:size.width], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithInteger:size.height],(id)kCVPixelBufferHeightKey,
//[NSNumber numberWithInteger:kCVPixelFormatType_420YpCbCr8Planar], (id)kCVPixelBufferPixelFormatTypeKey,
// [NSNumber numberWithInteger:kCVPixelFormatType_420YpCbCr8Planar], (id)kCVPixelBufferPixelFormatTypeKey, // force pixel format to planar
nil];
[output setPixelBufferAttributes:dic];
......@@ -237,20 +285,20 @@ struct v4mState;
@end
/* Per-filter capture state. NOTE(review): the diff rendering duplicated the
 * typedef and frame_ind lines; reconstructed as a single definition. */
typedef struct v4mState{
	NsMsWebCam *webcam;        /* Objective-C capture session wrapper */
	NSAutoreleasePool *myPool; /* pool created in v4m_init for this instance */
	int frame_ind;             /* index of the current frame */
	float fps;                 /* requested frame rate */
	float start_time;
	int frame_count;
}v4mState;
static void v4m_init(MSFilter *f){
v4mState *s=ms_new0(v4mState,1);
s->myPool = [[NSAutoreleasePool alloc] init];
s->webcam= [[NsMsWebCam alloc] init];
[s->webcam retain];
// [s->webcam retain];
s->start_time=0;
s->frame_count=-1;
s->fps=15;
......@@ -277,7 +325,6 @@ static void v4m_uninit(MSFilter *f){
v4m_stop(f,NULL);
[s->webcam release];
[s->myPool release];
ms_free(s);
}
......@@ -387,20 +434,12 @@ static void ms_v4m_cam_init(MSWebCam *cam)
/* MS_V4L_SET_DEVICE handler. Device selection is actually performed by name
 * (see v4m_set_name), so there is nothing to do here; kept for API symmetry. */
static int v4m_set_device(MSFilter *f, void *arg)
{
	(void)f;   /* unused: the old commented-out id copy was removed */
	(void)arg;
	return 0;
}
static int v4m_set_name(MSFilter *f, void *arg){
v4mState *s=(v4mState*)f->data;
//s->name = (char*) malloc(sizeof(char)*strlen((char*)arg));
//strcpy(s->name,(char*)arg);
[s->webcam setName:(char*)arg];
return 0;
......@@ -442,7 +481,7 @@ static void ms_v4m_detect(MSWebCamManager *obj){
cam->data = NULL;
ms_web_cam_manager_add_cam(obj,cam);
}
[myPool release];
[myPool drain];
}
#endif
......@@ -28,6 +28,9 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#define INVIDEOUT_C 1
#include "mediastreamer2/msvideoout.h"
#ifdef __APPLE__
#include <CoreFoundation/CFRunLoop.h>
#endif
struct _MSDisplay;
......@@ -235,6 +238,7 @@ static int sdl_create_window(SdlDisplay *wd, int w, int h){
if (info->blit_hw_A)
ms_message("Alpha blits between sw to hw surfaces: accelerated");
ms_debug("Setting SDL video mode");
wd->sdl_screen = SDL_SetVideoMode(w,h, 0,flags);
if (wd->sdl_screen == NULL ) {
ms_warning("no hardware for video mode: %s\n",
......@@ -247,27 +251,45 @@ static int sdl_create_window(SdlDisplay *wd, int w, int h){
SDL_WM_SetCaption("Video window", NULL);
once=FALSE;
}
wd->lay=SDL_CreateYUVOverlay(w , h ,SDL_YV12_OVERLAY,wd->sdl_screen);
if (wd->lay==NULL){
ms_warning("Couldn't create yuv overlay: %s\n",
SDL_GetError());
return -1;
}else{
ms_message("Number of planes: %i", wd->lay->planes);
SDL_LockYUVOverlay(wd->lay); // necessary for getting accurate plane addresses since SDL 1.3
ms_message("%i x %i YUV overlay created: hw_accel=%i, pitches=%i,%i,%i",wd->lay->w,wd->lay->h,wd->lay->hw_overlay,
wd->lay->pitches[0],wd->lay->pitches[1],wd->lay->pitches[2]);
ms_message("planes= %p %p %p %i %i",wd->lay->pixels[0],wd->lay->pixels[1],wd->lay->pixels[2],
wd->lay->pixels[1]-wd->lay->pixels[0],wd->lay->pixels[2]-wd->lay->pixels[1]);
SDL_UnlockYUVOverlay(wd->lay);
}
SDL_ShowCursor(0);//Hide the mouse cursor if was displayed
return 0;
}
/* Release the YUV overlay and the SDL screen surface (if present) and clear
 * both pointers so the display can be re-created safely. */
static void free_overlay_and_surface(SdlDisplay* wd) {
	if (wd->lay != NULL) {
		ms_message("Freeing overlay");
		SDL_FreeYUVOverlay(wd->lay);
		wd->lay = NULL;
	}
	if (wd->sdl_screen != NULL) {
		ms_message("Freeing surface");
		SDL_FreeSurface(wd->sdl_screen);
		wd->sdl_screen = NULL;
	}
}
static bool_t sdl_display_init(MSDisplay *obj, MSFilter *f, MSPicture *fbuf, MSPicture *fbuf_selfview){
SdlDisplay *wd = (SdlDisplay*)obj->data;
int i;
if (wd==NULL){
char driver[128];
/* Initialize the SDL library */
ms_message("Initialize SDL video");
wd=(SdlDisplay*)ms_new0(SdlDisplay,1);
wd->filter = f;
obj->data=wd;
......@@ -284,13 +306,9 @@ static bool_t sdl_display_init(MSDisplay *obj, MSFilter *f, MSPicture *fbuf, MSP
ms_mutex_lock(&wd->sdl_mutex);
}else {
ms_message("Cleaning WD");
ms_mutex_lock(&wd->sdl_mutex);
if (wd->lay!=NULL)
SDL_FreeYUVOverlay(wd->lay);
if (wd->sdl_screen!=NULL)
SDL_FreeSurface(wd->sdl_screen);
wd->lay=NULL;
wd->sdl_screen=NULL;
free_overlay_and_surface(wd);
}
wd->filter = f;
......@@ -365,7 +383,10 @@ static void sdl_display_update(MSDisplay *obj, int new_image, int new_selfview){
rect.y = (wd->screen_size.height-h)/2;
rect.w = w;
rect.h = h;
SDL_DisplayYUVOverlay(wd->lay,&rect);
if (SDL_DisplayYUVOverlay(wd->lay,&rect) != 0)
ms_error("Error while displaying overlay");
ms_mutex_unlock(&wd->sdl_mutex);
}
......@@ -395,21 +416,18 @@ static int sdl_poll_event(MSDisplay *obj, MSDisplayEvent *ev){
static void sdl_display_uninit(MSDisplay *obj){
SdlDisplay *wd = (SdlDisplay*)obj->data;
SDL_Event event;
int i;
if (wd==NULL)
return;
if (wd->lay!=NULL)
SDL_FreeYUVOverlay(wd->lay);
if (wd->sdl_screen!=NULL){
SDL_FreeSurface(wd->sdl_screen);
wd->sdl_screen=NULL;
if (wd!=NULL) {
free_overlay_and_surface(wd);
ms_free(wd);
wd=NULL;
}
wd->lay=NULL;
wd->sdl_screen=NULL;
ms_free(wd);
ms_message("WD Fred");
#ifdef __linux
/*purge the event queue before leaving*/
SDL_Event event;
int i;
for(i=0;SDL_PollEvent(&event) && i<100;++i){
}
#endif
......@@ -479,6 +497,12 @@ typedef struct VideoOut
bool_t ready;
bool_t autofit;
bool_t mirror;
#ifdef __APPLE__
bool_t need_update;
CFRunLoopTimerRef timer;
CFRunLoopTimerContext timer_context;
#endif
} VideoOut;
static void set_corner(VideoOut *s, int corner)
......@@ -520,6 +544,7 @@ static void set_vsize(VideoOut *s, MSVideoSize *sz){
}
static void video_out_init(MSFilter *f){
ms_message("video_out_init");
VideoOut *obj=(VideoOut*)ms_new0(VideoOut,1);
MSVideoSize def_size;
obj->ratio.num=11;
......@@ -624,8 +649,67 @@ static int _video_out_handle_resizing(MSFilter *f, void *data){
return ret;
}
/* Drain pending window-resize events (bounded to 100 iterations), then take
 * the filter lock and run video_out_prepare() if the output is not ready yet.
 * NOTE(review): this function RETURNS WITH THE FILTER LOCK HELD — every
 * caller (apple_loop_cb, video_out_process) must call ms_filter_unlock(f). */
static void poll_for_resizing_lock_filter_and_enventually_prepare(MSFilter *f) {
VideoOut *obj=(VideoOut*)f->data;
int i;
/* Handle up to 100 queued resize events; stop early when none remain. */
for(i=0;i<100;++i){
int ret = _video_out_handle_resizing(f, NULL);
if (ret<0)
break;
}
ms_filter_lock(f); /* deliberately left locked for the caller to release */
if (!obj->ready) video_out_prepare(f);
}
#ifdef __APPLE__
/* CFRunLoop timer callback installed by video_out_preprocess(); `info` is the
 * MSFilter* stored in timer_context.info. Polls resize events, prepares the
 * output if needed, and performs any pending display update from the thread
 * whose run loop scheduled the timer. */
static void apple_loop_cb(CFRunLoopTimerRef timer, void *info) {
MSFilter *f = (MSFilter *) info;
VideoOut *obj=(VideoOut*)f->data;
/* Returns with the filter lock held; released at the end of this callback. */
poll_for_resizing_lock_filter_and_enventually_prepare(f);
if (obj->need_update) {
/* Flush the update flagged by video_out_process(). */
ms_display_update(obj->display, 1, 1);
obj->need_update=false;
}
ms_filter_unlock(f);
}
#endif
/* Filter postprocess: on Apple, unschedule and destroy the refresh timer
 * created in video_out_preprocess(). */
static void video_out_postprocess(MSFilter *f){
#ifdef __APPLE__
	VideoOut* obj = (VideoOut*) f->data;
	if (obj->timer != NULL) { /* FIX: guard against preprocess never having run */
		CFRunLoopRemoveTimer(CFRunLoopGetCurrent(), obj->timer, kCFRunLoopCommonModes);
		/* FIX: CFRunLoopTimerCreate follows the Create rule, so we own a
		 * reference that the original code leaked. */
		CFRelease(obj->timer);
		obj->timer = NULL;
	}
#else
	(void)f; /* nothing to do on other platforms */
#endif
}
/* Filter preprocess: on non-Apple platforms, prepare the display directly.
 * On Apple, display work must happen on a run-loop thread, so install a
 * 10 ms CFRunLoop timer that drives apple_loop_cb() instead. */
static void video_out_preprocess(MSFilter *f){
#ifndef __APPLE__
	video_out_prepare(f);
#else
	VideoOut* obj = (VideoOut*) f->data;
	if (obj->timer != NULL) {
		ms_error("Non null timer found");
		/* FIX: the original only logged and then overwrote the pointer,
		 * leaking a timer that remained scheduled. Tear it down first. */
		CFRunLoopTimerInvalidate(obj->timer);
		CFRelease(obj->timer);
		obj->timer = NULL;
	}
	CFTimeInterval interval = 0.01f; /* 10 milliseconds */
	/* The context carries the MSFilter* to apple_loop_cb via its info field. */
	obj->timer_context.version = 0;
	obj->timer_context.info = f;
	obj->timer_context.retain = NULL;
	obj->timer_context.release = NULL;
	obj->timer_context.copyDescription = NULL;
	obj->timer = CFRunLoopTimerCreate(NULL,
	                                  CFAbsoluteTimeGetCurrent() + interval,
	                                  interval,
	                                  0,
	                                  0,
	                                  apple_loop_cb,
	                                  &(obj->timer_context));
	CFRunLoopAddTimer(CFRunLoopGetCurrent(), obj->timer, kCFRunLoopCommonModes);
#endif
}
......@@ -634,16 +718,11 @@ static void video_out_process(MSFilter *f){
mblk_t *inm;
int update=0;
int update_selfview=0;
int i;
for(i=0;i<100;++i){
int ret = _video_out_handle_resizing(f, NULL);
if (ret<0)
break;
}
ms_filter_lock(f);
if (!obj->ready) video_out_prepare(f);
if (obj->display==NULL){
#ifndef __APPLE__
poll_for_resizing_lock_filter_and_enventually_prepare(f);
#endif
if (!obj->ready){
ms_filter_unlock(f);
if (f->inputs[0]!=NULL)
ms_queue_flush(f->inputs[0]);
......@@ -723,7 +802,14 @@ static void video_out_process(MSFilter *f){
if (!ms_video_size_equal(newsize,cur)){
set_vsize(obj,&newsize);
ms_message("autofit: new size is %ix%i",newsize.width,newsize.height);
#ifndef __APPLE__
video_out_prepare(f);
#else
obj->ready=false;
ms_queue_flush(f->inputs[0]);
ms_filter_unlock(f);
return;
#endif
}
}
if (obj->sws1==NULL){
......@@ -760,7 +846,14 @@ static void video_out_process(MSFilter *f){
ms_display_unlock(obj->display);
}
ms_display_update(obj->display, update, update_selfview);
if (update == 1 || update_selfview == 1) {
#ifdef __APPLE__
obj->need_update = true;
#else
ms_display_update(obj->display, update, update_selfview);
#endif
}
ms_filter_unlock(f);
}
......@@ -932,6 +1025,7 @@ MSFilterDesc ms_video_out_desc={
.init=video_out_init,
.preprocess=video_out_preprocess,
.process=video_out_process,
.postprocess=video_out_postprocess,
.uninit=video_out_uninit,
.methods=methods
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment