Table of contents
- AVFilter workflow:
- API
- Core code
- Variables
- YUV video cropping
AVFilter workflow:
First, the split filter divides the input stream into two streams (main and tmp), and each stream is then processed separately. The tmp stream goes through the crop filter to be cropped, then through the vflip filter to be flipped vertically; the result is labelled flip. Finally, the main stream and the flip stream are fed into the overlay filter and composited together.
input is the buffer source filter and output is the buffersink filter. Every node in the graph is an AVFilterContext, and every edge between nodes is an AVFilterLink. All of this is managed by an AVFilterGraph.
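The same graph can also be built from a single filtergraph description string instead of creating and linking every node by hand. Below is a minimal sketch of that approach, assuming bufferSrc_ctx and bufferSink_ctx have already been created with avfilter_graph_create_filter, as in the full example at the end of this post:

AVFilterInOut *outputs = avfilter_inout_alloc();   /* endpoints of the parsed chain */
AVFilterInOut *inputs  = avfilter_inout_alloc();

/* "in": where the buffer source feeds the parsed chain */
outputs->name       = av_strdup("in");
outputs->filter_ctx = bufferSrc_ctx;
outputs->pad_idx    = 0;
outputs->next       = NULL;

/* "out": where the parsed chain ends in the buffersink */
inputs->name        = av_strdup("out");
inputs->filter_ctx  = bufferSink_ctx;
inputs->pad_idx     = 0;
inputs->next        = NULL;

/* same topology as the figure: split -> (main, tmp); tmp -> crop -> vflip; overlay */
const char *filters_descr =
    "split[main][tmp];"
    "[tmp]crop=iw:ih/2:0:0,vflip[flip];"
    "[main][flip]overlay=0:H/2";

int ret = avfilter_graph_parse_ptr(filter_graph, filters_descr, &inputs, &outputs, NULL);
if (ret >= 0)
    ret = avfilter_graph_config(filter_graph, NULL);

avfilter_inout_free(&inputs);
avfilter_inout_free(&outputs);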
API
- Look up a filter defined inside FFmpeg; before calling this you must register the filters with avfilter_register_all():
AVFilter *avfilter_get_by_name(const char *name);
- Feed a frame to be processed into the buffer source filter:
int av_buffersrc_add_frame(AVFilterContext *ctx, AVFrame *frame);
- Pull a processed frame out of the buffersink destination filter:
int av_buffersink_get_frame(AVFilterContext *ctx, AVFrame *frame);
- Allocate a filter graph:
AVFilterGraph *avfilter_graph_alloc(void);
- Create a filter instance (an AVFilterContext) and add it to an AVFilterGraph:
int avfilter_graph_create_filter(AVFilterContext **filt_ctx, const AVFilter *filt,
                                 const char *name, const char *args, void *opaque,
                                 AVFilterGraph *graph_ctx);
- Link two filter nodes:
int avfilter_link(AVFilterContext *src, unsigned srcpad,
                  AVFilterContext *dst, unsigned dstpad);
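Putting these calls together, a bare-bones pipeline looks roughly like the sketch below. Error handling is omitted; the buffer-source arguments (768x320, yuv420p, 1/25 time base) are placeholders matching the example at the end of this post, and the graph here is just buffer connected straight to buffersink with no processing in between:

avfilter_register_all();                               /* register filters (older FFmpeg API) */

AVFilterGraph   *graph = avfilter_graph_alloc();       /* 1. allocate the graph */

AVFilter        *src  = avfilter_get_by_name("buffer");     /* 2. look up filters by name */
AVFilter        *sink = avfilter_get_by_name("buffersink");

AVFilterContext *src_ctx, *sink_ctx;                   /* 3. instantiate them inside the graph */
avfilter_graph_create_filter(&src_ctx, src, "in",
    "video_size=768x320:pix_fmt=0:time_base=1/25:pixel_aspect=1/1",  /* pix_fmt=0 is AV_PIX_FMT_YUV420P */
    NULL, graph);
avfilter_graph_create_filter(&sink_ctx, sink, "out", NULL, NULL, graph);

avfilter_link(src_ctx, 0, sink_ctx, 0);                /* 4. wire the nodes together */
avfilter_graph_config(graph, NULL);                    /* 5. validate and configure the graph */

/* 6. per frame: push into the source, pull from the sink
 *    av_buffersrc_add_frame(src_ctx, frame_in);
 *    av_buffersink_get_frame(sink_ctx, frame_out);
 */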
Core code
Variables
- AVFilterGraph: overall management of the filter system

struct AVFilterGraph
{
    AVFilterContext **filters;
    unsigned nb_filters;
    // ...
};

- The AVFilter struct (here the overlay filter), and the AVFilterContext that represents an instance of it

AVFilter ff_vf_overlay = {
    .name    = "overlay",
    .inputs  = avfilter_vf_overlay_inputs,
    .outputs = avfilter_vf_overlay_outputs,
    // ...
};

struct AVFilterContext
{
    const AVFilter *filter;
    char *name;
    AVFilterPad *input_pads;
    AVFilterLink **inputs;
    unsigned nb_inputs;
    AVFilterPad *output_pads;
    AVFilterLink **outputs;
    unsigned nb_outputs;
    struct AVFilterGraph *graph;   // the AVFilterGraph this filter belongs to
};

- AVFilterLink: describes the link between two filters

struct AVFilterLink
{
    AVFilterContext *src;
    AVFilterPad *srcpad;
    AVFilterContext *dst;
    AVFilterPad *dstpad;
    struct AVFilterGraph *graph;
};

- AVFilterPad: defines a filter's input/output interface

struct AVFilterPad
{
    const char *name;
    AVFrame *(*get_video_buffer)(AVFilterLink *link, int w, int h);
    AVFrame *(*get_audio_buffer)(AVFilterLink *link, int nb_samples);
    int (*filter_frame)(AVFilterLink *link, AVFrame *frame);
    int (*request_frame)(AVFilterLink *link);
};
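To see how these structures fit together, here is a minimal sketch, assuming an AVFilterGraph *graph that has already been built and configured (for example the filter_graph from the next section): it walks graph->filters, where every element is an AVFilterContext node and every outputs[j] entry is an AVFilterLink pointing at the downstream node.

#include <stdio.h>
#include <libavfilter/avfilter.h>

static void dump_graph_nodes(AVFilterGraph *graph)
{
    for (unsigned i = 0; i < graph->nb_filters; i++) {
        AVFilterContext *ctx = graph->filters[i];    /* one node of the graph */
        printf("node '%s' (filter: %s), %u input(s), %u output(s)\n",
               ctx->name, ctx->filter->name, ctx->nb_inputs, ctx->nb_outputs);

        for (unsigned j = 0; j < ctx->nb_outputs; j++) {
            AVFilterLink *link = ctx->outputs[j];    /* one edge of the graph */
            if (link)
                printf("    output pad %u -> '%s'\n", j, link->dst->name);
        }
    }
}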
YUV video cropping
Take an input YUV file, crop it in the middle so that only the top half is kept, and make the bottom half a mirror image of the top half.
#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>

int main(int argc, char* argv[])
{
    int ret = 0;

    // input yuv
    FILE* inFile = NULL;
    const char* inFileName = "768x320.yuv";
    fopen_s(&inFile, inFileName, "rb+");
    int in_width = 768;
    int in_height = 320;

    // output yuv
    FILE* outFile = NULL;
    const char* outFileName = "out_crop_vfilter.yuv";
    fopen_s(&outFile, outFileName, "wb");

    avfilter_register_all();

    AVFilterGraph* filter_graph = avfilter_graph_alloc();

    // source filter: the buffer source needs to know the size, pixel format and time base of the input
    char args[512];
    snprintf(args, sizeof(args),
             "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
             in_width, in_height, AV_PIX_FMT_YUV420P, 1, 25, 1, 1);
    AVFilter* bufferSrc = avfilter_get_by_name("buffer");   // input of the AVFilterGraph
    AVFilterContext* bufferSrc_ctx;
    avfilter_graph_create_filter(&bufferSrc_ctx, bufferSrc, "in", args, NULL, filter_graph);

    // sink filter
    AVBufferSinkParams *bufferSink_params;
    AVFilterContext* bufferSink_ctx;
    AVFilter* bufferSink = avfilter_get_by_name("buffersink");
    enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
    bufferSink_params = av_buffersink_params_alloc();
    bufferSink_params->pixel_fmts = pix_fmts;
    ret = avfilter_graph_create_filter(&bufferSink_ctx, bufferSink, "out", NULL,
                                       bufferSink_params, filter_graph);

    // split filter
    AVFilter *splitFilter = avfilter_get_by_name("split");
    AVFilterContext *splitFilter_ctx;
    avfilter_graph_create_filter(&splitFilter_ctx, splitFilter, "split", "outputs=2",
                                 NULL, filter_graph);

    // crop filter: keep the top half of the picture
    AVFilter *cropFilter = avfilter_get_by_name("crop");
    AVFilterContext *cropFilter_ctx;
    avfilter_graph_create_filter(&cropFilter_ctx, cropFilter, "crop",
                                 "out_w=iw:out_h=ih/2:x=0:y=0", NULL, filter_graph);

    // vflip filter
    AVFilter *vflipFilter = avfilter_get_by_name("vflip");
    AVFilterContext *vflipFilter_ctx;
    avfilter_graph_create_filter(&vflipFilter_ctx, vflipFilter, "vflip", NULL, NULL, filter_graph);

    // overlay filter: place the flipped half on the bottom half of the main stream
    AVFilter *overlayFilter = avfilter_get_by_name("overlay");
    AVFilterContext *overlayFilter_ctx;
    avfilter_graph_create_filter(&overlayFilter_ctx, overlayFilter, "overlay",
                                 "x=0:y=H/2", NULL, filter_graph);

    // source filter to split filter
    avfilter_link(bufferSrc_ctx, 0, splitFilter_ctx, 0);
    // split filter's first pad to overlay filter's main pad
    avfilter_link(splitFilter_ctx, 0, overlayFilter_ctx, 0);
    // split filter's second pad to crop filter
    avfilter_link(splitFilter_ctx, 1, cropFilter_ctx, 0);
    // crop filter to vflip filter
    avfilter_link(cropFilter_ctx, 0, vflipFilter_ctx, 0);
    // vflip filter to overlay filter's second pad
    avfilter_link(vflipFilter_ctx, 0, overlayFilter_ctx, 1);
    // overlay filter to sink filter
    avfilter_link(overlayFilter_ctx, 0, bufferSink_ctx, 0);

    // check filter graph
    avfilter_graph_config(filter_graph, NULL);

    char *graph_str = avfilter_graph_dump(filter_graph, NULL);
    FILE* graphFile = NULL;
    fopen_s(&graphFile, "graphFile.txt", "w");   // dump the layout of the filter graph
    fprintf(graphFile, "%s", graph_str);
    av_free(graph_str);

    AVFrame *frame_in = av_frame_alloc();
    unsigned char *frame_buffer_in = (unsigned char *)av_malloc(
        av_image_get_buffer_size(AV_PIX_FMT_YUV420P, in_width, in_height, 1));
    av_image_fill_arrays(frame_in->data, frame_in->linesize, frame_buffer_in,
                         AV_PIX_FMT_YUV420P, in_width, in_height, 1);

    AVFrame *frame_out = av_frame_alloc();
    unsigned char *frame_buffer_out = (unsigned char *)av_malloc(
        av_image_get_buffer_size(AV_PIX_FMT_YUV420P, in_width, in_height, 1));
    av_image_fill_arrays(frame_out->data, frame_out->linesize, frame_buffer_out,
                         AV_PIX_FMT_YUV420P, in_width, in_height, 1);

    frame_in->width = in_width;
    frame_in->height = in_height;
    frame_in->format = AV_PIX_FMT_YUV420P;

    uint32_t frame_count = 0;
    while (1) {
        // read one YUV420P frame
        if (fread(frame_buffer_in, 1, in_width*in_height * 3 / 2, inFile) != in_width*in_height * 3 / 2) {
            break;
        }
        // input Y, U, V planes
        frame_in->data[0] = frame_buffer_in;
        frame_in->data[1] = frame_buffer_in + in_width*in_height;
        frame_in->data[2] = frame_buffer_in + in_width*in_height * 5 / 4;

        if (av_buffersrc_add_frame(bufferSrc_ctx, frame_in) < 0) {
            printf("Error while adding frame.\n");
            break;
        }
        // the filter graph does the processing internally
        /* pull filtered pictures from the filtergraph */
        ret = av_buffersink_get_frame(bufferSink_ctx, frame_out);
        if (ret < 0)
            break;

        // output Y, U, V planes
        if (frame_out->format == AV_PIX_FMT_YUV420P) {
            for (int i = 0; i < frame_out->height; i++) {
                fwrite(frame_out->data[0] + frame_out->linesize[0] * i, 1, frame_out->width, outFile);
            }
            for (int i = 0; i < frame_out->height / 2; i++) {
                fwrite(frame_out->data[1] + frame_out->linesize[1] * i, 1, frame_out->width / 2, outFile);
            }
            for (int i = 0; i < frame_out->height / 2; i++) {
                fwrite(frame_out->data[2] + frame_out->linesize[2] * i, 1, frame_out->width / 2, outFile);
            }
        }
        ++frame_count;
        if (frame_count % 25 == 0)
            printf("Process %u frame!\n", frame_count);
        av_frame_unref(frame_out);
    }

    fclose(inFile);
    fclose(outFile);
    fclose(graphFile);

    av_frame_free(&frame_in);
    av_frame_free(&frame_out);
    avfilter_graph_free(&filter_graph);   // also frees the AVFilterContexts created inside it
    return 0;
}
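To check the result, the raw output can be played with, for example, ffplay -f rawvideo -video_size 768x320 -pixel_format yuv420p out_crop_vfilter.yuv: the bottom half of every frame should be a mirror image of the top half. The same processing can also be reproduced with the ffmpeg command-line tool by declaring the input as rawvideo in the same way and passing -vf "split[main][tmp];[tmp]crop=iw:ih/2:0:0,vflip[flip];[main][flip]overlay=0:H/2".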