Commit 3825b526 authored by Anton Khirnov's avatar Anton Khirnov
Browse files

lavfi: check all ff_start_frame/draw_slice/end_frame calls for errors

parent d4f89906
...@@ -323,9 +323,10 @@ static int request_frame(AVFilterLink *link) ...@@ -323,9 +323,10 @@ static int request_frame(AVFilterLink *link)
switch (link->type) { switch (link->type) {
case AVMEDIA_TYPE_VIDEO: case AVMEDIA_TYPE_VIDEO:
ff_start_frame(link, avfilter_ref_buffer(buf, ~0)); if ((ret = ff_start_frame(link, avfilter_ref_buffer(buf, ~0))) < 0 ||
ff_draw_slice(link, 0, link->h, 1); (ret = ff_draw_slice(link, 0, link->h, 1)) < 0 ||
ff_end_frame(link); (ret = ff_end_frame(link)) < 0)
goto fail;
break; break;
case AVMEDIA_TYPE_AUDIO: case AVMEDIA_TYPE_AUDIO:
ret = ff_filter_samples(link, avfilter_ref_buffer(buf, ~0)); ret = ff_filter_samples(link, avfilter_ref_buffer(buf, ~0));
...@@ -334,6 +335,7 @@ static int request_frame(AVFilterLink *link) ...@@ -334,6 +335,7 @@ static int request_frame(AVFilterLink *link)
return AVERROR(EINVAL); return AVERROR(EINVAL);
} }
fail:
avfilter_unref_buffer(buf); avfilter_unref_buffer(buf);
return ret; return ret;
......
...@@ -241,9 +241,11 @@ static int request_frame(AVFilterLink *outlink) ...@@ -241,9 +241,11 @@ static int request_frame(AVFilterLink *outlink)
* so we don't have to worry about dereferencing it ourselves. */ * so we don't have to worry about dereferencing it ourselves. */
switch (outlink->type) { switch (outlink->type) {
case AVMEDIA_TYPE_VIDEO: case AVMEDIA_TYPE_VIDEO:
ff_start_frame(outlink, fifo->root.next->buf); if ((ret = ff_start_frame(outlink, fifo->root.next->buf)) < 0 ||
ff_draw_slice (outlink, 0, outlink->h, 1); (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
ff_end_frame (outlink); (ret = ff_end_frame(outlink)) < 0)
return ret;
queue_pop(fifo); queue_pop(fifo);
break; break;
case AVMEDIA_TYPE_AUDIO: case AVMEDIA_TYPE_AUDIO:
......
...@@ -143,9 +143,11 @@ static int request_frame(AVFilterLink *outlink) ...@@ -143,9 +143,11 @@ static int request_frame(AVFilterLink *outlink)
buf->pts = av_rescale_q(s->first_pts, ctx->inputs[0]->time_base, buf->pts = av_rescale_q(s->first_pts, ctx->inputs[0]->time_base,
outlink->time_base) + s->frames_out; outlink->time_base) + s->frames_out;
ff_start_frame(outlink, buf); if ((ret = ff_start_frame(outlink, buf)) < 0 ||
ff_draw_slice(outlink, 0, outlink->h, 1); (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
ff_end_frame(outlink); (ret = ff_end_frame(outlink)) < 0)
return ret;
s->frames_out++; s->frames_out++;
} }
return 0; return 0;
...@@ -231,9 +233,13 @@ static int end_frame(AVFilterLink *inlink) ...@@ -231,9 +233,13 @@ static int end_frame(AVFilterLink *inlink)
buf_out->pts = av_rescale_q(s->first_pts, inlink->time_base, buf_out->pts = av_rescale_q(s->first_pts, inlink->time_base,
outlink->time_base) + s->frames_out; outlink->time_base) + s->frames_out;
ff_start_frame(outlink, buf_out); if ((ret = ff_start_frame(outlink, buf_out)) < 0 ||
ff_draw_slice(outlink, 0, outlink->h, 1); (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
ff_end_frame(outlink); (ret = ff_end_frame(outlink)) < 0) {
avfilter_unref_bufferp(&buf);
return ret;
}
s->frames_out++; s->frames_out++;
} }
flush_fifo(s->fifo); flush_fifo(s->fifo);
......
...@@ -438,18 +438,28 @@ static int source_request_frame(AVFilterLink *outlink) ...@@ -438,18 +438,28 @@ static int source_request_frame(AVFilterLink *outlink)
{ {
Frei0rContext *frei0r = outlink->src->priv; Frei0rContext *frei0r = outlink->src->priv;
AVFilterBufferRef *picref = ff_get_video_buffer(outlink, AV_PERM_WRITE, outlink->w, outlink->h); AVFilterBufferRef *picref = ff_get_video_buffer(outlink, AV_PERM_WRITE, outlink->w, outlink->h);
int ret;
picref->video->pixel_aspect = (AVRational) {1, 1}; picref->video->pixel_aspect = (AVRational) {1, 1};
picref->pts = frei0r->pts++; picref->pts = frei0r->pts++;
picref->pos = -1; picref->pos = -1;
ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0)); ret = ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0));
if (ret < 0)
goto fail;
frei0r->update(frei0r->instance, av_rescale_q(picref->pts, frei0r->time_base, (AVRational){1,1000}), frei0r->update(frei0r->instance, av_rescale_q(picref->pts, frei0r->time_base, (AVRational){1,1000}),
NULL, (uint32_t *)picref->data[0]); NULL, (uint32_t *)picref->data[0]);
ff_draw_slice(outlink, 0, outlink->h, 1); ret = ff_draw_slice(outlink, 0, outlink->h, 1);
ff_end_frame(outlink); if (ret < 0)
goto fail;
ret = ff_end_frame(outlink);
fail:
avfilter_unref_buffer(picref); avfilter_unref_buffer(picref);
return 0; return ret;
} }
AVFilter avfilter_vsrc_frei0r_src = { AVFilter avfilter_vsrc_frei0r_src = {
......
...@@ -279,12 +279,15 @@ static int request_frame(AVFilterLink *outlink) ...@@ -279,12 +279,15 @@ static int request_frame(AVFilterLink *outlink)
if (av_fifo_size(select->pending_frames)) { if (av_fifo_size(select->pending_frames)) {
AVFilterBufferRef *picref; AVFilterBufferRef *picref;
int ret;
av_fifo_generic_read(select->pending_frames, &picref, sizeof(picref), NULL); av_fifo_generic_read(select->pending_frames, &picref, sizeof(picref), NULL);
ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0)); if ((ret = ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0))) < 0 ||
ff_draw_slice(outlink, 0, outlink->h, 1); (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
ff_end_frame(outlink); (ret = ff_end_frame(outlink)) < 0);
avfilter_unref_buffer(picref); avfilter_unref_buffer(picref);
return 0; return ret;
} }
while (!select->select) { while (!select->select) {
......
...@@ -157,11 +157,11 @@ static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, ...@@ -157,11 +157,11 @@ static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w,
return picref; return picref;
} }
static void return_frame(AVFilterContext *ctx, int is_second) static int return_frame(AVFilterContext *ctx, int is_second)
{ {
YADIFContext *yadif = ctx->priv; YADIFContext *yadif = ctx->priv;
AVFilterLink *link= ctx->outputs[0]; AVFilterLink *link= ctx->outputs[0];
int tff; int tff, ret;
if (yadif->parity == -1) { if (yadif->parity == -1) {
tff = yadif->cur->video->interlaced ? tff = yadif->cur->video->interlaced ?
...@@ -193,12 +193,16 @@ static void return_frame(AVFilterContext *ctx, int is_second) ...@@ -193,12 +193,16 @@ static void return_frame(AVFilterContext *ctx, int is_second)
} else { } else {
yadif->out->pts = AV_NOPTS_VALUE; yadif->out->pts = AV_NOPTS_VALUE;
} }
ff_start_frame(ctx->outputs[0], yadif->out); ret = ff_start_frame(ctx->outputs[0], yadif->out);
if (ret < 0)
return ret;
} }
ff_draw_slice(ctx->outputs[0], 0, link->h, 1); if ((ret = ff_draw_slice(ctx->outputs[0], 0, link->h, 1)) < 0 ||
ff_end_frame(ctx->outputs[0]); (ret = ff_end_frame(ctx->outputs[0])) < 0)
return ret;
yadif->frame_pending = (yadif->mode&1) && !is_second; yadif->frame_pending = (yadif->mode&1) && !is_second;
return 0;
} }
static int start_frame(AVFilterLink *link, AVFilterBufferRef *picref) static int start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
......
...@@ -142,19 +142,29 @@ static int color_request_frame(AVFilterLink *link) ...@@ -142,19 +142,29 @@ static int color_request_frame(AVFilterLink *link)
{ {
ColorContext *color = link->src->priv; ColorContext *color = link->src->priv;
AVFilterBufferRef *picref = ff_get_video_buffer(link, AV_PERM_WRITE, color->w, color->h); AVFilterBufferRef *picref = ff_get_video_buffer(link, AV_PERM_WRITE, color->w, color->h);
int ret;
picref->video->pixel_aspect = (AVRational) {1, 1}; picref->video->pixel_aspect = (AVRational) {1, 1};
picref->pts = color->pts++; picref->pts = color->pts++;
picref->pos = -1; picref->pos = -1;
ff_start_frame(link, avfilter_ref_buffer(picref, ~0)); ret = ff_start_frame(link, avfilter_ref_buffer(picref, ~0));
if (ret < 0)
goto fail;
ff_draw_rectangle(picref->data, picref->linesize, ff_draw_rectangle(picref->data, picref->linesize,
color->line, color->line_step, color->hsub, color->vsub, color->line, color->line_step, color->hsub, color->vsub,
0, 0, color->w, color->h); 0, 0, color->w, color->h);
ff_draw_slice(link, 0, color->h, 1); ret = ff_draw_slice(link, 0, color->h, 1);
ff_end_frame(link); if (ret < 0)
goto fail;
ret = ff_end_frame(link);
fail:
avfilter_unref_buffer(picref); avfilter_unref_buffer(picref);
return 0; return ret;
} }
AVFilter avfilter_vsrc_color = { AVFilter avfilter_vsrc_color = {
......
...@@ -289,13 +289,20 @@ static int request_frame(AVFilterLink *outlink) ...@@ -289,13 +289,20 @@ static int request_frame(AVFilterLink *outlink)
return ret; return ret;
outpicref = avfilter_ref_buffer(movie->picref, ~0); outpicref = avfilter_ref_buffer(movie->picref, ~0);
ff_start_frame(outlink, outpicref); ret = ff_start_frame(outlink, outpicref);
ff_draw_slice(outlink, 0, outlink->h, 1); if (ret < 0)
ff_end_frame(outlink); goto fail;
ret = ff_draw_slice(outlink, 0, outlink->h, 1);
if (ret < 0)
goto fail;
ret = ff_end_frame(outlink);
fail:
avfilter_unref_buffer(movie->picref); avfilter_unref_buffer(movie->picref);
movie->picref = NULL; movie->picref = NULL;
return 0; return ret;
} }
AVFilter avfilter_vsrc_movie = { AVFilter avfilter_vsrc_movie = {
......
...@@ -130,6 +130,7 @@ static int request_frame(AVFilterLink *outlink) ...@@ -130,6 +130,7 @@ static int request_frame(AVFilterLink *outlink)
{ {
TestSourceContext *test = outlink->src->priv; TestSourceContext *test = outlink->src->priv;
AVFilterBufferRef *picref; AVFilterBufferRef *picref;
int ret;
if (test->max_pts >= 0 && test->pts > test->max_pts) if (test->max_pts >= 0 && test->pts > test->max_pts)
return AVERROR_EOF; return AVERROR_EOF;
...@@ -143,9 +144,10 @@ static int request_frame(AVFilterLink *outlink) ...@@ -143,9 +144,10 @@ static int request_frame(AVFilterLink *outlink)
test->nb_frame++; test->nb_frame++;
test->fill_picture_fn(outlink->src, picref); test->fill_picture_fn(outlink->src, picref);
ff_start_frame(outlink, picref); if ((ret = ff_start_frame(outlink, picref)) < 0 ||
ff_draw_slice(outlink, 0, test->h, 1); (ret = ff_draw_slice(outlink, 0, test->h, 1)) < 0 ||
ff_end_frame(outlink); (ret = ff_end_frame(outlink)) < 0)
return ret;
return 0; return 0;
} }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment