Commit 3097777b authored by Gautier Pelloux-Prayer's avatar Gautier Pelloux-Prayer

Merge remote-tracking branch 'origin/master' into new_adaptive_algorithm

Conflicts:
	src/videofilters/vp8.c
	tester/Makefile.am
	tester/mediastreamer2_audio_stream_tester.c
	tester/mediastreamer2_tester.c
	tester/mediastreamer2_tester.h
parents 3f2a9afc af520273
......@@ -32,18 +32,26 @@ Compilation and installation
* Optional dependencies (for video to be enabled, see --enable-video):
- libavcodec
- libswscale
- libvpx
- libopus
- x11 with libxv-dev on linux
- theora
For Linux, MacOS, mingw compilation:
* For Linux, MacOS or mingw
$> ./configure
$> make
$> su -c 'make install'
For Windows with Visual Studio 2008 IDE, open build/win32native/mediastreamer2.sln
More instructions and advices can be found for the mingw compilation procedure in Linphone's README.mingw.
More instructions and advice can be found for the mingw compilation procedure in Linphone's README.mingw.
* Windows XP and later with Visual Studio 2010
1) Make a directory where you have together:
oRTP (clone from git://git.linphone.org/ortp.git )
mediastreamer2 (clone from git://git.linphone.org/mediastreamer2.git)
linphone-deps (directory to be created).
2) Download latest linphone-deps-win32 zip from http://download-mirror.savannah.gnu.org/releases/linphone/misc/ and unpack it in the linphone-deps directory.
3) open build/win32native/mediastreamer2.sln
Contact information:
--------------------
......
......@@ -71,7 +71,8 @@ MS2_PUBLIC void ring_stop (RingStream * stream);
/*
* Crypto suite used to configure an encrypted stream. */
typedef enum _MSCryptoSuite{
MS_AES_128_SHA1_80 = 1,
MS_CRYPTO_SUITE_INVALID=0,
MS_AES_128_SHA1_80,
MS_AES_128_SHA1_32,
MS_AES_128_NO_AUTH,
MS_NO_CIPHER_SHA1_80,
......@@ -79,6 +80,14 @@ typedef enum _MSCryptoSuite{
MS_AES_256_SHA1_32
} MSCryptoSuite;
/* Textual representation of an MSCryptoSuite: the suite's name plus an
 * optional parameter string. Converted to/from MSCryptoSuite with
 * ms_crypto_suite_build_from_name_params() and ms_crypto_suite_to_name_params(). */
typedef struct _MSCryptoSuiteNameParams{
const char *name;   /* crypto suite name */
const char *params; /* optional parameters, may be NULL -- TODO confirm NULL is allowed */
}MSCryptoSuiteNameParams;
MS2_PUBLIC MSCryptoSuite ms_crypto_suite_build_from_name_params(const MSCryptoSuiteNameParams *nameparams);
MS2_PUBLIC int ms_crypto_suite_to_name_params(MSCryptoSuite cs, MSCryptoSuiteNameParams *nameparams);
typedef enum StreamType {
AudioStreamType,
VideoStreamType
......@@ -128,7 +137,6 @@ struct _MediaStream {
time_t start_time;
time_t last_iterate_time;
bool_t use_rc;
bool_t is_beginning;
bool_t owns_sessions;
bool_t pad;
/**
......@@ -588,29 +596,6 @@ MS2_PUBLIC void video_stream_iterate(VideoStream *stream);
*/
MS2_PUBLIC void video_stream_send_fir(VideoStream *stream);
/**
* Ask the video stream to send a Picture Loss Indication.
* @param[in] stream The videostream object.
*/
MS2_PUBLIC void video_stream_send_pli(VideoStream *stream);
/**
* Ask the video stream to send a Slice Loss Indication.
* @param[in] stream The videostream object.
* @param[in] first The address of the first lost macroblock.
* @param[in] number The number of lost macroblocks.
* @param[in] picture_id The six least significant bits of the picture ID.
*/
MS2_PUBLIC void video_stream_send_sli(VideoStream *stream, uint16_t first, uint16_t number, uint8_t picture_id);
/**
* Ask the video stream to send a Reference Picture Selection Indication.
* @param[in] stream The videostream object.
* @param[in] bit_string A pointer to the variable length native RPSI bit string to include in the RTCP FB message.
* @param[in] bit_string_len The length of the bit_string in bits.
*/
MS2_PUBLIC void video_stream_send_rpsi(VideoStream *stream, uint8_t *bit_string, uint16_t bit_string_len);
/**
* Ask the video stream to generate a Video Fast Update (generally after receiving a Full-Intra Request).
* @param[in] stream The videostream object.
......
......@@ -431,10 +431,19 @@ MS2_PUBLIC int ms_filter_call_method_noarg(MSFilter *f, unsigned int id);
* @param f A MSFilter object.
* @param id A method ID.
*
* Returns: 0 if successfull, -1 otherwise.
* Returns: TRUE if method is implemented, FALSE otherwise.
*/
MS2_PUBLIC bool_t ms_filter_has_method(MSFilter *f, unsigned int id);
/**
* Returns whether a filter implements a given interface.
* @param f a MSFilter object
* @param id an interface id.
*
* Returns TRUE if interface is implemented, FALSE, otherwise.
**/
bool_t ms_filter_implements_interface(MSFilter *f, MSFilterInterfaceId id);
/**
* Set a callback on filter's to be informed of private filter's event.
* This callback is called from the filter's MSTicker, unless a global event queue
......
......@@ -35,6 +35,13 @@ struct _MSVideoCodecRPSI {
uint16_t bit_string_len;
};
typedef struct _MSVideoEncoderPixFmt MSVideoEncoderPixFmt;

/* Argument for the MS_VIDEO_ENCODER_SUPPORTS_PIXFMT filter method:
 * the caller fills 'pixfmt' with the pixel format to query; the encoder
 * presumably answers through 'supported' -- verify against an implementation. */
struct _MSVideoEncoderPixFmt {
uint32_t pixfmt;  /* pixel format identifier to test */
bool_t supported; /* whether the encoder accepts that format */
};
/**
* Interface definition for video display filters.
**/
......@@ -224,8 +231,8 @@ typedef enum _MSRecorderState MSRecorderState;
* Interface definition for video encoders.
**/
#define MS_VIDEO_ENCODER_HAS_BUILTIN_CONVERTER \
MS_FILTER_METHOD(MSFilterVideoEncoderInterface, 0, bool_t)
#define MS_VIDEO_ENCODER_SUPPORTS_PIXFMT \
MS_FILTER_METHOD(MSFilterVideoEncoderInterface, 0, MSVideoEncoderPixFmt*)
/* request a video-fast-update (=I frame for H263,MP4V-ES) to a video encoder*/
#define MS_VIDEO_ENCODER_REQ_VFU \
MS_FILTER_METHOD_NO_ARG(MSFilterVideoEncoderInterface, 1)
......
......@@ -83,7 +83,7 @@ static int channel_process_in(Channel *chan, MSQueue *q, int32_t *sum, int nsamp
return 0;
}
static int channel_flow_control(Channel *chan, int threshold, uint64_t time, bool_t do_purge){
static int channel_flow_control(Channel *chan, int threshold, uint64_t time){
int size;
int skip=0;
if (chan->last_flow_control==(uint64_t)-1){
......@@ -96,7 +96,7 @@ static int channel_flow_control(Channel *chan, int threshold, uint64_t time, boo
if (time-chan->last_flow_control>=5000){
if (chan->min_fullness>=threshold){
skip=chan->min_fullness-(threshold/2);
if (do_purge) ms_bufferizer_skip_bytes(&chan->bufferizer,skip);
ms_bufferizer_skip_bytes(&chan->bufferizer,skip);
}
chan->last_flow_control=time;
chan->min_fullness=-1;
......@@ -210,13 +210,12 @@ static void mixer_process(MSFilter *f){
/* read from all inputs and sum everybody */
for(i=0;i<MIXER_MAX_CHANNELS;++i){
MSQueue *q=f->inputs[i];
int do_purge=i!=s->master_channel;
if (q){
if (channel_process_in(&s->channels[i],q,s->sum,nwords))
got_something=TRUE;
if ((skip=channel_flow_control(&s->channels[i],s->skip_threshold,f->ticker->time,do_purge))>0){
ms_warning("Too much data in channel %i, %i ms in excess %s",i,(skip*1000)/(2*s->nchannels*s->rate),do_purge ? "were skipped.":".");
if ((skip=channel_flow_control(&s->channels[i],s->skip_threshold,f->ticker->time))>0){
ms_warning("Too much data in channel %i, %i ms in excess dropped",i,(skip*1000)/(2*s->nchannels*s->rate));
}
}
}
......@@ -225,30 +224,27 @@ static void mixer_process(MSFilter *f){
#endif
/* compute outputs. In conference mode each one has a different output, because its channel own contribution has to be removed*/
if (got_something){
do{
if (s->conf_mode==0){
mblk_t *om=NULL;
for(i=0;i<MIXER_MAX_CHANNELS;++i){
MSQueue *q=f->outputs[i];
if (q){
if (om==NULL){
om=make_output(s->sum,nwords);
}else{
om=dupb(om);
}
ms_queue_put(q,om);
if (s->conf_mode==0){
mblk_t *om=NULL;
for(i=0;i<MIXER_MAX_CHANNELS;++i){
MSQueue *q=f->outputs[i];
if (q){
if (om==NULL){
om=make_output(s->sum,nwords);
}else{
om=dupb(om);
}
ms_queue_put(q,om);
}
}else{
for(i=0;i<MIXER_MAX_CHANNELS;++i){
MSQueue *q=f->outputs[i];
if (q){
ms_queue_put(q,channel_process_out(&s->channels[i],s->sum,nwords));
}
}
}else{
for(i=0;i<MIXER_MAX_CHANNELS;++i){
MSQueue *q=f->outputs[i];
if (q){
ms_queue_put(q,channel_process_out(&s->channels[i],s->sum,nwords));
}
}
skip-=s->bytespertick;
}while(skip>=s->bytespertick);
}
}
}
......@@ -304,6 +300,8 @@ static int mixer_set_conference_mode(MSFilter *f, void *data){
return 0;
}
/*not implemented yet. A master channel is a channel that is used as a reference to mix other inputs. Samples from the master channel should never be dropped*/
static int mixer_set_master_channel(MSFilter *f, void *data){
MixerState *s=(MixerState *)f->data;
s->master_channel=*(int*)data;
......
......@@ -282,15 +282,15 @@ static void compute_max_bitrate(OpusEncData *d, int ptimeStep) {
if (normalized_cbr<6000) {
int initial_value = normalized_cbr;
normalized_cbr = 6000;
d->max_network_bitrate = ((normalized_cbr*d->ptime/8000) + 12 + 8 + 20) *8000/d->ptime;
ms_warning("Opus encoder doesn't support bitrate [%i] set to 6kbps network bitrate [%d]", initial_value, d->max_network_bitrate);
d->max_network_bitrate = (normalized_cbr/(pps*8) + 12 + 8 + 20) *8*pps;
ms_warning("Opus encoder doesn't support bitrate [%i] set to 6kbps, network bitrate [%d]", initial_value, d->max_network_bitrate);
}
if (normalized_cbr>510000) {
int initial_value = normalized_cbr;
normalized_cbr = 510000;
d->max_network_bitrate = ((normalized_cbr*d->ptime/8000) + 12 + 8 + 20) *8000/d->ptime;
ms_warning("Opus encoder doesn't support bitrate [%i] set to 510kbps network bitrate [%d]", initial_value, d->max_network_bitrate);
d->max_network_bitrate = (normalized_cbr/(pps*8) + 12 + 8 + 20) *8*pps;
ms_warning("Opus encoder doesn't support bitrate [%i] set to 510kbps, network bitrate [%d]", initial_value, d->max_network_bitrate);
}
d->bitrate = normalized_cbr;
......@@ -375,6 +375,8 @@ static int ms_opus_enc_set_bitrate(MSFilter *f, void *arg) {
int bitrate = *((int *)arg); // the argument is the network bitrate requested
/* this function also manages the ptime: check whether we are increasing or decreasing the bitrate in order to possibly decrease or increase the ptime */
if (d->bitrate>0 && d->ptime>0) { /* at first call to set_bitrate(bitrate is initialised at -1), do not modify ptime, neither if it wasn't initialised too */
if (bitrate > d->max_network_bitrate ) {
......@@ -396,14 +398,16 @@ static int ms_opus_enc_set_bitrate(MSFilter *f, void *arg) {
ptimeStepValue = 20;
}
}
d->max_network_bitrate = bitrate;
ms_message("opus setbitrate to %d",d->max_network_bitrate);
ms_filter_lock(f);
compute_max_bitrate(d, ptimeStepValue*ptimeStepSign);
apply_max_bitrate(d);
ms_filter_unlock(f);
if (d->bitrate>0 && d->ptime>0) { /*don't apply bitrate before preprocess*/
ms_filter_lock(f);
compute_max_bitrate(d, ptimeStepValue*ptimeStepSign);
apply_max_bitrate(d);
ms_filter_unlock(f);
}
return 0;
}
......
......@@ -211,6 +211,10 @@ bool_t ms_filter_desc_implements_interface(MSFilterDesc *desc, MSFilterInterface
return FALSE;
}
/* Returns TRUE when the filter's descriptor declares support for the
 * given interface, FALSE otherwise. Simple convenience wrapper around
 * ms_filter_desc_implements_interface(). */
bool_t ms_filter_implements_interface(MSFilter *f, MSFilterInterfaceId id){
	MSFilterDesc *desc = f->desc;
	return ms_filter_desc_implements_interface(desc, id);
}
MSList *ms_filter_lookup_by_interface(MSFilterInterfaceId id){
MSList *ret=NULL;
MSList *elem;
......
/*
mediastreamer2 library - modular sound and video processing and streaming
Copyright (C) 2010 Belledonne Communications SARL
Copyright (C) 2010 Belledonne Communications SARL
Author: Simon Morlat <simon.morlat@linphone.org>
This program is free software; you can redistribute it and/or
......@@ -31,6 +31,7 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
typedef struct _DecData{
mblk_t *yuv_msg;
mblk_t *sps,*pps;
AVFrame* orig;
Rfc3984Context unpacker;
MSPicture outbuf;
struct SwsContext *sws_ctx;
......@@ -78,6 +79,10 @@ static void dec_init(MSFilter *f){
d->outbuf.h=0;
d->bitstream_size=65536;
d->bitstream=ms_malloc0(d->bitstream_size);
d->orig = avcodec_alloc_frame();
if (!d->orig) {
ms_error("Could not allocate frame");
}
f->data=d;
}
......@@ -98,6 +103,7 @@ static void dec_uninit(MSFilter *f){
if (d->yuv_msg) freemsg(d->yuv_msg);
if (d->sps) freemsg(d->sps);
if (d->pps) freemsg(d->pps);
if (d->orig) avcodec_free_frame(&d->orig);
ms_free(d->bitstream);
ms_free(d);
}
......@@ -120,7 +126,7 @@ static mblk_t *get_as_yuvmsg(MSFilter *f, DecData *s, AVFrame *orig){
ctx->width,ctx->height,PIX_FMT_YUV420P,SWS_FAST_BILINEAR,
NULL, NULL, NULL);
}
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
if (sws_scale(s->sws_ctx,(const uint8_t * const *)orig->data,orig->linesize, 0,
ctx->height, s->outbuf.planes, s->outbuf.strides)<0){
#else
......@@ -215,7 +221,7 @@ static int nalusToFrame(DecData *d, MSQueue *naluq, bool_t *new_sps_pps){
*dst++=0;
start_picture=FALSE;
}
/*prepend nal marker*/
*dst++=0;
*dst++=0;
......@@ -243,7 +249,7 @@ static void dec_process(MSFilter *f){
DecData *d=(DecData*)f->data;
mblk_t *im;
MSQueue nalus;
AVFrame orig;
ms_queue_init(&nalus);
while((im=ms_queue_get(f->inputs[0]))!=NULL){
/*push the sps/pps given in sprop-parameter-sets if any*/
......@@ -270,18 +276,18 @@ static void dec_process(MSFilter *f){
int len;
int got_picture=0;
AVPacket pkt;
avcodec_get_frame_defaults(&orig);
avcodec_get_frame_defaults(d->orig);
av_init_packet(&pkt);
pkt.data = p;
pkt.size = end-p;
len=avcodec_decode_video2(&d->av_context,&orig,&got_picture,&pkt);
len=avcodec_decode_video2(&d->av_context,d->orig,&got_picture,&pkt);
if (len<=0) {
ms_warning("ms_AVdecoder_process: error %i.",len);
ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
break;
}
if (got_picture) {
ms_queue_put(f->outputs[0],get_as_yuvmsg(f,d,&orig));
ms_queue_put(f->outputs[0],get_as_yuvmsg(f,d,d->orig));
if (!d->first_image_decoded) {
d->first_image_decoded = TRUE;
ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
......
......@@ -32,6 +32,7 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
typedef struct {
FILE *file;
AVCodec *codec;
AVFrame* pict;
}JpegWriter;
static void jpg_init(MSFilter *f){
......@@ -40,6 +41,7 @@ static void jpg_init(MSFilter *f){
if (s->codec==NULL){
ms_error("Could not find CODEC_ID_MJPEG !");
}
s->pict = avcodec_alloc_frame();
f->data=s;
}
......@@ -48,6 +50,7 @@ static void jpg_uninit(MSFilter *f){
if (s->file!=NULL){
fclose(s->file);
}
if (s->pict) avcodec_free_frame(&s->pict);
ms_free(s);
}
......@@ -86,14 +89,13 @@ static void jpg_process(MSFilter *f){
int error,got_pict;
int comp_buf_sz=msgdsize(m);
uint8_t *comp_buf=(uint8_t*)alloca(comp_buf_sz);
AVFrame pict;
mblk_t *jpegm;
struct SwsContext *sws_ctx;
struct AVPacket packet;
AVCodecContext *avctx=avcodec_alloc_context3(s->codec);
memset(&packet, 0, sizeof(packet));
avctx->width=yuvbuf.w;
avctx->height=yuvbuf.h;
avctx->time_base.num = 1;
......@@ -115,7 +117,7 @@ static void jpg_process(MSFilter *f){
goto end;
}
jpegm=ms_yuv_buf_alloc (&yuvjpeg,avctx->width, avctx->height);
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
if (sws_scale(sws_ctx,(const uint8_t *const*)yuvbuf.planes,yuvbuf.strides,0,avctx->height,yuvjpeg.planes,yuvjpeg.strides)<0){
#else
if (sws_scale(sws_ctx,(uint8_t **)yuvbuf.planes,yuvbuf.strides,0,avctx->height,yuvjpeg.planes,yuvjpeg.strides)<0){
......@@ -127,11 +129,11 @@ static void jpg_process(MSFilter *f){
goto end;
}
sws_freeContext(sws_ctx);
avcodec_get_frame_defaults(&pict);
avpicture_fill((AVPicture*)&pict,(uint8_t*)jpegm->b_rptr,avctx->pix_fmt,avctx->width,avctx->height);
avcodec_get_frame_defaults(s->pict);
avpicture_fill((AVPicture*)s->pict,(uint8_t*)jpegm->b_rptr,avctx->pix_fmt,avctx->width,avctx->height);
packet.data=comp_buf; packet.size=comp_buf_sz;
error=avcodec_encode_video2(avctx, &packet, &pict, &got_pict);
error=avcodec_encode_video2(avctx, &packet, s->pict, &got_pict);
if (error<0){
ms_error("Could not encode jpeg picture.");
}else{
......
......@@ -111,17 +111,16 @@ static int msv4l2_close(V4l2State *s){
return 0;
}
static bool_t v4lv2_try_format( V4l2State *s, struct v4l2_format *fmt, int fmtid){
static bool_t v4lv2_try_format( int fd, struct v4l2_format *fmt, int fmtid){
fmt->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt->fmt.pix.pixelformat = fmtid;
fmt->fmt.pix.field = V4L2_FIELD_ANY;
if (v4l2_ioctl (s->fd, VIDIOC_TRY_FMT, fmt)<0){
if (v4l2_ioctl (fd, VIDIOC_TRY_FMT, fmt)<0){
ms_message("VIDIOC_TRY_FMT: %s",strerror(errno));
return FALSE;
}
if (v4l2_ioctl (s->fd, VIDIOC_S_FMT, fmt)<0){
if (v4l2_ioctl (fd, VIDIOC_S_FMT, fmt)<0){
ms_message("VIDIOC_S_FMT: %s",strerror(errno));
return FALSE;
}
......@@ -145,6 +144,126 @@ static int get_picture_buffer_size(MSPixFmt pix_fmt, int w, int h){
return 0;
}
/* Returns the highest frame rate (in images/s) that the device on 'fd'
 * reports for the given pixel format at the given resolution, or -1 if
 * no discrete frame interval could be enumerated. */
static int query_max_fps_for_format_resolution(int fd, int pixelformat, MSVideoSize vsize) {
	int fps = -1;
	struct v4l2_frmivalenum frmival;

	/* zero the whole struct: the kernel expects the reserved fields to be 0,
	 * and leaving them as stack garbage can make the ioctl fail */
	memset(&frmival, 0, sizeof(frmival));
	frmival.index = 0;
	frmival.pixel_format = pixelformat;
	frmival.width = vsize.width;
	frmival.height = vsize.height;

	while (v4l2_ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) >= 0) {
		frmival.index++;
		/* the interval is numerator/denominator seconds per frame, so
		 * fps = denominator/numerator; guard against a zero numerator */
		if (frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE && frmival.discrete.numerator != 0) {
			fps = MAX(fps, (int) (frmival.discrete.denominator / frmival.discrete.numerator));
		}
	}
	return fps;
}
/* Availability summary for one V4L2 pixel format on a device, filled in by
 * query_format_description_for_size() and consumed by pick_best_format(). */
typedef struct _V4L2FormatDescription {
/* V4L2_PIX_FMT_* identifier */
int pixel_format;
/* TRUE when the driver did not flag the format V4L2_FMT_FLAG_EMULATED */
bool_t native;
/* TRUE when the driver flagged the format V4L2_FMT_FLAG_COMPRESSED */
bool_t compressed;
/* highest frame rate found at the queried resolution, -1 if unknown */
int max_fps;
} V4L2FormatDescription;
/* Enumerates the device's pixel formats and fills in the availability
 * description of the four formats we know how to handle, for the given
 * resolution. Returns a pointer to a static array of 4 entries.
 * NOTE(review): the returned array is static, so this function is neither
 * reentrant nor thread-safe. */
static const V4L2FormatDescription* query_format_description_for_size(int fd, MSVideoSize vsize) {
	/* hardcode supported format */
	static V4L2FormatDescription formats[4];
	int i;

	/* fully reset the table each call: without this, 'native' and
	 * 'compressed' hold garbage on the first call and stale data from a
	 * previous device on subsequent calls */
	memset(formats, 0, sizeof(formats));
	formats[0].pixel_format = V4L2_PIX_FMT_YUV420;
	formats[1].pixel_format = V4L2_PIX_FMT_YUYV;
	formats[2].pixel_format = V4L2_PIX_FMT_MJPEG;
	formats[3].pixel_format = V4L2_PIX_FMT_RGB24;
	for (i = 0; i < 4; i++) {
		formats[i].max_fps = -1;
	}

	{
		struct v4l2_fmtdesc fmt;
		memset(&fmt, 0, sizeof(fmt));
		fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

		while (v4l2_ioctl(fd, VIDIOC_ENUM_FMT, &fmt) >= 0) {
			for (i = 0; i < 4; i++) {
				if (fmt.pixelformat == formats[i].pixel_format) {
					formats[i].max_fps = query_max_fps_for_format_resolution(fd, fmt.pixelformat, vsize);
					formats[i].native = !(fmt.flags & V4L2_FMT_FLAG_EMULATED);
					formats[i].compressed = (fmt.flags & V4L2_FMT_FLAG_COMPRESSED) != 0;
					break;
				}
			}
			fmt.index++;
		}
	}

	return formats;
}
/* Maps a V4L2 pixel format identifier onto the corresponding MSPixFmt
 * value. Unrecognized formats are logged and reported as
 * MS_PIX_FMT_UNKNOWN. */
static MSPixFmt v4l2_format_to_ms(int v4l2format) {
	if (v4l2format == V4L2_PIX_FMT_YUV420) return MS_YUV420P;
	if (v4l2format == V4L2_PIX_FMT_YUYV) return MS_YUYV;
	if (v4l2format == V4L2_PIX_FMT_MJPEG) return MS_MJPEG;
	if (v4l2format == V4L2_PIX_FMT_RGB24) return MS_RGB24;
	ms_error("Unknown v4l2 format 0x%08x", v4l2format);
	return MS_PIX_FMT_UNKNOWN;
}
/* Picks the best usable pixel format at the requested resolution.
 * Rules for picking a format are:
 *   - only formats reaching max_fps >= 15 images/s are considered
 *   - native > compressed > emulated
 * Returns MS_PIX_FMT_UNKNOWN when no candidate is accepted by the driver. */
static MSPixFmt pick_best_format(int fd, const V4L2FormatDescription* format_desc, MSVideoSize vsize) {
	enum { PREFER_NATIVE = 0, PREFER_COMPRESSED, NO_PREFERENCE } i;
	int j;

	for (i = PREFER_NATIVE; i <= NO_PREFERENCE; i++) {
		for (j = 0; j < 4; j++) {
			int candidate = -1;

			if (format_desc[j].max_fps >= 15) {
				switch (i) {
					case PREFER_NATIVE:
						if (format_desc[j].native && !format_desc[j].compressed)
							candidate = j;
						break;
					case PREFER_COMPRESSED:
						if (format_desc[j].compressed)
							candidate = j;
						break;
					case NO_PREFERENCE:
					default:
						candidate = j;
						break;
				}
			}

			if (candidate != -1) {
				struct v4l2_format fmt;
				/* zero the request: v4lv2_try_format() only fills in type,
				 * pixelformat and field, so the remaining members must not
				 * be left as stack garbage when passed to the ioctls */
				memset(&fmt, 0, sizeof(fmt));
				fmt.fmt.pix.width = vsize.width;
				fmt.fmt.pix.height = vsize.height;
				if (v4lv2_try_format(fd, &fmt, format_desc[candidate].pixel_format)) {
					return v4l2_format_to_ms(format_desc[candidate].pixel_format);
				}
			}
		}
	}
	ms_error("No compatible format found");
	return MS_PIX_FMT_UNKNOWN;
}
static int msv4l2_configure(V4l2State *s){
struct v4l2_capability cap;
struct v4l2_format fmt;
......@@ -165,7 +284,9 @@ static int msv4l2_configure(V4l2State *s){
return -1;
}
ms_message("Driver is %s",cap.driver);
memset(&fmt,0,sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
......@@ -173,35 +294,16 @@ static int msv4l2_configure(V4l2State *s){
ms_error("VIDIOC_G_FMT failed: %s",strerror(errno));
}
vsize=s->vsize;
do{
fmt.fmt.pix.width = s->vsize.width;
fmt.fmt.pix.height = s->vsize.height;
ms_message("v4l2: trying %ix%i",s->vsize.width,s->vsize.height);
if (v4lv2_try_format(s,&fmt,V4L2_PIX_FMT_YUV420)){
s->pix_fmt=MS_YUV420P;
s->int_pix_fmt=V4L2_PIX_FMT_YUV420;
ms_message("v4lv2: YUV420P chosen");
break;
}else if (v4lv2_try_format(s,&fmt,V4L2_PIX_FMT_YUYV)){
s->pix_fmt=MS_YUYV;
s->int_pix_fmt=V4L2_PIX_FMT_YUYV;
ms_message("v4lv2: V4L2_PIX_FMT_YUYV chosen");
break;
}else if (v4lv2_try_format(s,&fmt,V4L2_PIX_FMT_RGB24)){
s->pix_fmt=MS_RGB24;
s->int_pix_fmt=V4L2_PIX_FMT_RGB24;
ms_message("v4lv2: RGB24 chosen");
break;
}else if (v4lv2_try_format(s,&fmt,V4L2_PIX_FMT_MJPEG)){
s->pix_fmt=MS_MJPEG;
s->int_pix_fmt=V4L2_PIX_FMT_MJPEG;
ms_message("v4lv2: MJPEG chosen");
break;
}else{
ms_error("Could not find supported pixel format for %ix%i", s->vsize.width, s->vsize.height);
}
s->vsize=ms_video_size_get_just_lower_than(s->vsize);
}while(s->vsize.width!=0);
const V4L2FormatDescription* formats_desc = query_format_description_for_size(s->fd, s->vsize);
s->pix_fmt = pick_best_format(s->fd, formats_desc, s->vsize);
if (s->pix_fmt == MS_PIX_FMT_UNKNOWN)
s->vsize=ms_video_size_get_just_lower_than(s->vsize);
} while(s->vsize.width!=0 && (s->pix_fmt == MS_PIX_FMT_UNKNOWN));
if (s->vsize.width==0){
ms_message("Could not find any combination of resolution/pixel-format that works !");
s->vsize=vsize;
......@@ -214,7 +316,7 @@ static int msv4l2_configure(V4l2State *s){
if (v4l2_ioctl (s->fd, VIDIOC_G_FMT, &fmt)<0){
ms_error("VIDIOC_G_FMT failed: %s",strerror(errno));
}else{
ms_message("Size of webcam delivered pictures is %ix%i",fmt.fmt.pix.width,fmt.fmt.pix.height);
ms_message("Size of webcam delivered pictures is %ix%i. Format:0x%08x",fmt.fmt.pix.width,fmt.fmt.pix.height, s->pix_fmt);
s->vsize.width=fmt.fmt.pix.width;
s->vsize.height=fmt.fmt.pix.height;
}
......@@ -638,6 +740,7 @@ static void msv4l2_detect(MSWebCamManager *obj){
struct v4l2_capability cap;
char devname[32];
int i;
for(i=0;i<10;++i){
int fd;
snprintf(devname,sizeof(devname),"/dev/video%i",i);
......
......@@ -56,12 +56,12 @@ static mblk_t *jpeg2yuv(uint8_t *jpgbuf, int bufsize, MSVideoSize *reqsize){
#ifndef NO_FFMPEG
AVCodecContext av_context;
int got_picture=0;
AVFrame orig;
mblk_t *ret;
struct SwsContext *sws_ctx;
AVPacket pkt;
MSPicture dest;
AVCodec *codec=avcodec_find_decoder(CODEC_ID_MJPEG);
AVFrame* orig = avcodec_alloc_frame();
if (codec==NULL){
ms_error("Could not find MJPEG decoder in ffmpeg.");
......@@ -77,8 +77,7 @@ static mblk_t *jpeg2yuv(uint8_t *jpgbuf, int bufsize, MSVideoSize *reqsize){
pkt.data=jpgbuf;
pkt.size=bufsize;
memset(&orig, 0, sizeof(orig));
if (avcodec_decode_video2(&av_context,&orig,&got_picture,&pkt) < 0) {
if (avcodec_decode_video2(&av_context,orig,&got_picture,&pkt) < 0) {
ms_error("jpeg2yuv: avcodec_decode_video failed");
avcodec_close(&av_context);
return NULL;
......@@ -96,10 +95,10 @@ static mblk_t *jpeg2yuv(uint8_t *jpgbuf, int bufsize, MSVideoSize *reqsize){
return NULL;
}
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
if (sws_scale(sws_ctx,(const uint8_t* const *)orig.data,orig.linesize,0,av_context.height,dest.planes,dest.strides)<0){
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
if (sws_scale(sws_ctx,(const uint8_t* const *)orig->data,orig->linesize,0,av_context.height,dest.planes,dest.strides)<0){
#else
if (sws_scale(sws_ctx,(uint8_t**)orig.data,orig.linesize,0,av_context.height,dest.planes,dest.strides)<0){
if (sws_scale(sws_ctx,(uint8_t**)orig->data,orig->linesize,0,av_context.height,dest.planes,dest.strides)<0){
#endif
ms_error("jpeg2yuv: ms_sws_scale() failed.");
sws_freeContext(sws_ctx);
......@@ -108,13 +107,14 @@ static mblk_t *jpeg2yuv(uint8_t *jpgbuf, int bufsize, MSVideoSize *reqsize){
return NULL;
}
sws_freeContext(sws_ctx);
avcodec_free_frame(&orig);
avcodec_close(&av_context);
return ret;
#elif TARGET_OS_IPHONE
MSPicture dest;
CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, jpgbuf, bufsize, NULL);
// use the data provider to get a CGImage; release the data provider
CGImageRef image = CGImageCreateWithJPEGDataProvider(dataProvider, NULL, FALSE,
CGImageRef image = CGImageCreateWithJPEGDataProvider(dataProvider, NULL, FALSE,
kCGRenderingIntentDefault);
CGDataProviderRelease(dataProvider);
reqsize->width = CGImageGetWidth(image);
......@@ -138,7 +138,7 @@ static mblk_t *jpeg2yuv(uint8_t *jpgbuf, int bufsize, MSVideoSize *reqsize){