Commit c02b03da authored by Guillaume Beraudo

Add Apple loop + more verbose qtcapture.

Merge branch 'dev_videomac' of git://git.linphone.org/mediastreamer2 into dev_videomac

Conflicts:
	acinclude.m4
parents 7b399de6 87b952d4
@@ -268,8 +268,11 @@ static void au_card_detect(MSSndCardManager * m)
ms_error("get kAudioHardwarePropertyDevices error %ld", err);
return;
}
/*first, add Default AudioUnit
does not work: why ?
*/
/*ms_snd_card_manager_add_card(m,ca_card_new("Default", "",-1, MS_SND_CARD_CAP_CAPTURE|MS_SND_CARD_CAP_PLAYBACK));
*/
count = slen / sizeof(AudioDeviceID);
for (i = 0; i < count; i++) {
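For context on where slen and the AudioDeviceID array above come from: the device list is read from the kAudioHardwarePropertyDevices property. A minimal sketch of that query using the AudioObject API follows (the helper name is hypothetical, and the file itself may use the older AudioHardwareGetProperty calls instead):

#include <stdlib.h>
#include <CoreAudio/CoreAudio.h>
#include "mediastreamer2/mscommon.h"

static void list_core_audio_devices(void){
	AudioObjectPropertyAddress addr = { kAudioHardwarePropertyDevices,
	                                    kAudioObjectPropertyScopeGlobal,
	                                    kAudioObjectPropertyElementMaster };
	UInt32 slen = 0;
	/* first ask for the size of the AudioDeviceID array */
	AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &addr, 0, NULL, &slen);
	AudioDeviceID *devs = (AudioDeviceID *)malloc(slen);
	/* then fetch the array itself */
	AudioObjectGetPropertyData(kAudioObjectSystemObject, &addr, 0, NULL, &slen, devs);
	ms_message("Found %u CoreAudio devices", (unsigned)(slen / sizeof(AudioDeviceID)));
	free(devs);
}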
@@ -1600,11 +1600,11 @@ mblk_t *ms_load_jpeg_as_yuv(const char *jpgpath, MSVideoSize *reqsize){
uint8_t *jpgbuf;
DWORD err;
HANDLE fd;
BOOL res;
#ifdef UNICODE
WCHAR wUnicode[1024];
MultiByteToWideChar(CP_UTF8, 0, jpgpath, -1, wUnicode, 1024);
fd = CreateFile(wUnicode, GENERIC_READ, FILE_SHARE_READ, NULL,
OPEN_EXISTING, 0, NULL);
#else
fd = CreateFile(jpgpath, GENERIC_READ, FILE_SHARE_READ, NULL,
@@ -1633,8 +1633,8 @@ mblk_t *ms_load_jpeg_as_yuv(const char *jpgpath, MSVideoSize *reqsize){
m=ms_load_generate_yuv(reqsize);
return m;
}
err=0;
res = ReadFile(fd, jpgbuf, st_sizel, &err, NULL) ;
if (err!=st_sizel){
ms_error("Could not read as much as wanted !");
@@ -1655,11 +1655,8 @@ mblk_t *ms_load_jpeg_as_yuv(const char *jpgpath, MSVideoSize *reqsize){
struct stat statbuf;
uint8_t *jpgbuf;
int err;
#ifndef WIN32
int fd=open(jpgpath,O_RDONLY);
#else
int fd=open(jpgpath,O_RDONLY|O_BINARY);
#endif
if (fd!=-1){
fstat(fd,&statbuf);
if (statbuf.st_size<=0)
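The #ifndef WIN32 above exists because mingw builds also go through open() but need O_BINARY so the JPEG is not mangled by text-mode translation. One common way to keep a single code path, sketched here as a suggestion (hypothetical helper, not necessarily what the new code does), is to define O_BINARY as a no-op where it is missing:

#include <fcntl.h>
#include <sys/stat.h>
#include <unistd.h>

#ifndef O_BINARY
#define O_BINARY 0 /* no-op on POSIX, required on Windows/mingw */
#endif

/* hypothetical helper: open the jpeg and report its size, or return -1 */
static int open_jpeg_file(const char *jpgpath, off_t *size){
	struct stat statbuf;
	int fd = open(jpgpath, O_RDONLY | O_BINARY);
	if (fd == -1) return -1;
	if (fstat(fd, &statbuf) != 0 || statbuf.st_size <= 0){
		close(fd);
		return -1;
	}
	*size = statbuf.st_size;
	return fd;
}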
@@ -16,7 +16,6 @@ struct v4mState;
QTCaptureDeviceInput *input;
QTCaptureDecompressedVideoOutput * output;
QTCaptureSession *session;
//QTCaptureDevice *device;
ms_mutex_t mutex;
queue_t rq;
@@ -39,8 +38,7 @@ struct v4mState;
@implementation NsMsWebCam
- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{
ms_mutex_lock(&mutex);
mblk_t *buf;
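The callback body is cut off here after the mblk_t declaration. The usual shape of such a QTKit frame callback, sketched below as an assumption rather than a copy of qtcapture.m (it needs <QTKit/QTKit.h>, <CoreVideo/CoreVideo.h>, <string.h> and the mediastreamer2 msgb/queue utilities), is to copy the frame into an mblk_t and queue it under the mutex so the filter's ticker thread can drain it later with getq():

- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{
	CVPixelBufferRef frame = (CVPixelBufferRef)videoFrame;
	if (CVPixelBufferLockBaseAddress(frame, 0) != kCVReturnSuccess) return;
	/* works for packed formats; planar formats (e.g. 420 YpCbCr) need per-plane
	   copies via CVPixelBufferGetBaseAddressOfPlane() instead */
	size_t size = CVPixelBufferGetDataSize(frame);
	mblk_t *buf = allocb(size, 0);
	memcpy(buf->b_wptr, CVPixelBufferGetBaseAddress(frame), size);
	buf->b_wptr += size;
	CVPixelBufferUnlockBaseAddress(frame, 0);
	ms_mutex_lock(&mutex);
	putq(&rq, buf); /* consumed by the filter's process callback on the ticker thread */
	ms_mutex_unlock(&mutex);
}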
@@ -123,31 +121,31 @@ struct v4mState;
UInt32 fmt = [desc formatType];
if( fmt == kCVPixelFormatType_420YpCbCr8Planar)
{ms_message("FORMAT = MS_YUV420P");return MS_YUV420P;}
//else if( fmt == MS_YUYV)
// return;
else if( fmt == kCVPixelFormatType_24RGB)
{ms_message("FORMAT = MS_RGB24");return MS_RGB24;}
//else if( fmt == MS_RGB24_REV)
// return;
//else if( fmt == MS_MJPEG)
// return;
else if( fmt == kUYVY422PixelFormat)
{ms_message("FORMAT = MS_UYVY");return MS_UYVY;}
else if( fmt == kYUVSPixelFormat)
{ms_message("FORMAT = MS_YUY2");return MS_YUY2;}
else if( fmt == k32RGBAPixelFormat)
{ms_message("FORMAT = MS_RGBA32");return MS_RGBA32;}
}
}
}
ms_warning("The camera wasn't open; using MS_YUV420P pixel format");
return MS_YUV420P;
}
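The new ms_message() calls only fire for formats this chain already recognizes; a camera reporting anything else still falls through silently to the MS_YUV420P default above. A small hypothetical helper (not part of this commit) that logs the raw FourCC would make that case visible as well:

/* hypothetical helper: print a CoreVideo/QuickTime FourCC such as '2vuy' in readable form */
static void log_pixel_fourcc(UInt32 fmt){
	char cc[5];
	cc[0] = (char)((fmt >> 24) & 0xff);
	cc[1] = (char)((fmt >> 16) & 0xff);
	cc[2] = (char)((fmt >> 8) & 0xff);
	cc[3] = (char)(fmt & 0xff);
	cc[4] = '\0';
	ms_message("QTCapture pixel format '%s' (0x%08x)", cc, (unsigned)fmt);
}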
@@ -173,19 +171,28 @@ struct v4mState;
}
}
if(device)
bool success = [device open:&error];
if (success) ms_message("Device opened");
else {
ms_error("%s", [[error localizedDescription] UTF8String]);
return;
}
input = [[QTCaptureDeviceInput alloc] initWithDevice:device];
success = [session addInput:input error:&error];
if (success) ms_message("Input added to session");
else ms_error("%s", [[error localizedDescription] UTF8String]);
success = [session addOutput:output error:&error];
if (success) ms_message("Output added to session");
else ms_error("%s", [[error localizedDescription] UTF8String]);
}
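For readers who do not have the rest of the method in front of them, the QTKit bring-up around these calls usually follows the sequence below. This is a generic sketch assumed to sit inside an init-style method of the webcam class (device selection simplified, names local), not a copy of the remainder of qtcapture.m:

NSError *error = nil;
QTCaptureSession *session = [[QTCaptureSession alloc] init];
QTCaptureDevice *device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
if (![device open:&error]){
	ms_error("Could not open capture device: %s", [[error localizedDescription] UTF8String]);
	return;
}
QTCaptureDeviceInput *input = [[QTCaptureDeviceInput alloc] initWithDevice:device];
QTCaptureDecompressedVideoOutput *output = [[QTCaptureDecompressedVideoOutput alloc] init];
[output setDelegate:self]; /* frames arrive via captureOutput:didOutputVideoFrame:... above */
if (![session addInput:input error:&error] || ![session addOutput:output error:&error]){
	ms_error("Could not assemble capture session: %s", [[error localizedDescription] UTF8String]);
	return;
}
[session startRunning];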
-(void) setSize:(MSVideoSize) size
{
ms_message("Set size w=%i, h=%i", size.width, size.height);
NSDictionary * dic = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:size.width], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithInteger:size.height],(id)kCVPixelBufferHeightKey,
@@ -209,7 +216,7 @@ struct v4mState;
size.width = [[dic objectForKey:(id)kCVPixelBufferWidthKey] integerValue];
size.height = [[dic objectForKey:(id)kCVPixelBufferHeightKey] integerValue];
}
ms_message("get size w=%i, h=%i", size.width, size.height);
return size;
}
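The width/height dictionary built in setSize: is what QTCaptureDecompressedVideoOutput consumes through setPixelBufferAttributes:. A short sketch of how it is typically applied; the kCVPixelBufferPixelFormatTypeKey entry is an addition of this sketch, not something the diff shows:

NSDictionary *attrs = [NSDictionary dictionaryWithObjectsAndKeys:
	[NSNumber numberWithInteger:size.width], (id)kCVPixelBufferWidthKey,
	[NSNumber numberWithInteger:size.height], (id)kCVPixelBufferHeightKey,
	[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8Planar], (id)kCVPixelBufferPixelFormatTypeKey,
	nil];
[output setPixelBufferAttributes:attrs];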
@@ -233,12 +240,10 @@ struct v4mState;
typedef struct v4mState{
NsMsWebCam * webcam;
NSAutoreleasePool* myPool;
mblk_t *mire;
int frame_ind;
float fps;
float start_time;
int frame_count;
bool_t usemire;
}v4mState;
static void v4m_init(MSFilter *f){
@@ -246,11 +251,9 @@ static void v4m_init(MSFilter *f){
s->myPool = [[NSAutoreleasePool alloc] init];
s->webcam= [[NsMsWebCam alloc] init];
[s->webcam retain];
s->mire=NULL;
s->start_time=0;
s->frame_count=-1;
s->fps=15;
s->usemire=(getenv("DEBUG")!=NULL);
f->data=s;
}
@@ -273,51 +276,11 @@ static void v4m_uninit(MSFilter *f){
v4mState *s=(v4mState*)f->data;
v4m_stop(f,NULL);
freemsg(s->mire);
[s->webcam release];
[s->myPool release];
ms_free(s);
}
static mblk_t * v4m_make_mire(v4mState *s){
unsigned char *data;
int i,j,line,pos;
MSVideoSize vsize = [s->webcam getSize];
int patternw=vsize.width/6;
int patternh=vsize.height/6;
int red,green=0,blue=0;
if (s->mire==NULL){
s->mire=allocb(vsize.width*vsize.height*3,0);
s->mire->b_wptr=s->mire->b_datap->db_lim;
}
data=s->mire->b_rptr;
for (i=0;i<vsize.height;++i){
line=i*vsize.width*3;
if ( ((i+s->frame_ind)/patternh) & 0x1) red=255;
else red= 0;
for (j=0;j<vsize.width;++j){
pos=line+(j*3);
if ( ((j+s->frame_ind)/patternw) & 0x1) blue=255;
else blue= 0;
data[pos]=red;
data[pos+1]=green;
data[pos+2]=blue;
}
}
s->frame_ind++;
return s->mire;
}
static mblk_t * v4m_make_nowebcam(v4mState *s){
if (s->mire==NULL && s->frame_ind==0){
//s->mire=ms_load_nowebcam(&s->vsize, -1);
}
s->frame_ind++;
return s->mire;
}
static void v4m_process(MSFilter * obj){
v4mState *s=(v4mState*)obj->data;
uint32_t timestamp;
@@ -338,23 +301,7 @@ static void v4m_process(MSFilter * obj){
{
om=getq([s->webcam rq]);
}
else
{
/*if (s->pix_fmt==MS_YUV420P && s->vsize.width==MS_VIDEO_SIZE_CIF_W && s->vsize.height==MS_VIDEO_SIZE_CIF_H)
{
if (s->usemire)
{
om=dupmsg(v4m_make_mire(s));
}
else
{
mblk_t *tmpm=v4m_make_nowebcam(s);
if (tmpm)
om=dupmsg(tmpm);
}
}*/
}
if (om!=NULL)
{
timestamp=obj->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
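A concrete number for the timestamp line above: the mediastreamer2 ticker counts milliseconds while RTP video uses a 90000 Hz clock, so multiplying by 90 converts between the two (at ticker->time == 2000, i.e. two seconds in, the timestamp is 180000). The frame is then normally stamped and pushed downstream roughly as follows; this is a sketch of the usual v4m_process tail, assumed rather than shown by the truncated hunk:

timestamp=obj->ticker->time*90; /* rtp uses a 90000 Hz clockrate for video */
mblk_set_timestamp_info(om,timestamp);
ms_queue_put(obj->outputs[0],om);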
@@ -41,6 +41,10 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#include <stdlib.h>
#include <string.h>
#ifdef __APPLE__
#include <CoreFoundation/CFRunLoop.h>
#endif
static int cond=1;
static const char * capture_card=NULL;
@@ -434,6 +438,10 @@ static void run_media_streams(int localport, const char *remote_ip, int remotepo
}
}else{ /* no interactive stuff - continuous debug output */
rtp_session_register_event_queue(session,q);
#ifdef __APPLE__
CFRunLoopRun();
#else
while(cond)
{
int n;
@@ -463,6 +471,7 @@ static void run_media_streams(int localport, const char *remote_ip, int remotepo
parse_events(q);
}
}
#endif // target MAC
}
printf("stopping all...\n");
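One side effect of the CFRunLoopRun() branch above is that the periodic statistics and parse_events() polling done by the while(cond) loop are skipped on Apple, since CFRunLoopRun() simply blocks. If that polling is wanted there too, one option (a suggestion, not something this commit implements; it assumes parse_events() takes the OrtpEvQueue registered above) is to drive it from a CFRunLoopTimer and leave the loop with CFRunLoopStop():

#ifdef __APPLE__
#include <CoreFoundation/CoreFoundation.h>

/* hypothetical timer callback: one polling iteration, mirroring the while(cond) body */
static void poll_timer_cb(CFRunLoopTimerRef timer, void *info){
	OrtpEvQueue *q = (OrtpEvQueue *)info;
	parse_events(q);
	if (!cond) CFRunLoopStop(CFRunLoopGetCurrent()); /* makes CFRunLoopRun() return */
}

static void run_apple_loop(OrtpEvQueue *q){
	CFRunLoopTimerContext ctx = { 0, q, NULL, NULL, NULL };
	CFRunLoopTimerRef timer = CFRunLoopTimerCreate(kCFAllocatorDefault,
		CFAbsoluteTimeGetCurrent() + 0.1, 0.1, 0, 0, poll_timer_cb, &ctx);
	CFRunLoopAddTimer(CFRunLoopGetCurrent(), timer, kCFRunLoopCommonModes);
	CFRunLoopRun();
	CFRelease(timer);
}
#endif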