FFmpeg AVFilter usage example

Headers
extern "C" {
#include "libavutil/mem.h"
#include "libavfilter/avfiltergraph.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
#include "libavdevice/avdevice.h"
};
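
On newer FFmpeg releases (4.0 and later) the libavfilter/avfiltergraph.h header is gone and the graph API lives in libavfilter/avfilter.h, so an include block along the following lines should be used instead (a sketch; libavdevice is only needed if you also use device capture):

extern "C" {
#include "libavfilter/avfilter.h"   //replaces libavfilter/avfiltergraph.h on FFmpeg 4.0+
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
#include "libavutil/mem.h"
}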

Register filters
 avfilter_register_all();
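
On FFmpeg 4.0 and later, avfilter_register_all() is deprecated and all filters are registered automatically, so the call is only needed when building against older releases. A version guard along these lines keeps the code building on both (a sketch, assuming the change landed with libavfilter major version 7):

#if LIBAVFILTER_VERSION_MAJOR < 7
 avfilter_register_all();   //required on FFmpeg 3.x and earlier
#endif
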
/*
Draw a rectangle on pSrcFrame; the final image data is stored in pDestFrame
*/
void VideoDecodec::DrawRectangleToFrame(AVFrame* pSrcFrame, AVFrame* pDestFrame, int x, int y, int w, int h)
{
 AVFilterGraph* pFilterGraph = avfilter_graph_alloc();
 char szErrMsg[128] = { 0 };
 char szArgs[512] = { 0 };
 char szFilterDescr[256] = { 0 };
 sprintf(szFilterDescr, "drawbox=x=%d:y=%d:w=%d:h=%d:color=yellow@1", x, y, w, h);
 const AVFilter* pBufferSrc = avfilter_get_by_name("buffer");
 const AVFilter* pBufferSink = avfilter_get_by_name("buffersink");
 AVFilterInOut* pFilterOut = avfilter_inout_alloc();
 AVFilterInOut* pFilterIn = avfilter_inout_alloc();
 enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_NONE };
 AVBufferSinkParams* pBufferSinkParams;

 //the last few parameters (time_base and pixel_aspect) are hard-coded to 1/1 instead of the real video parameters
 snprintf(szArgs, sizeof(szArgs), "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
    pSrcFrame->width, pSrcFrame->height, pSrcFrame->format, 1, 1, 1, 1);

 int ret = 0;
 AVFilterContext* pBufferSinkContext = NULL;
 AVFilterContext* pBufferSrcContext = NULL;
 if ((ret = avfilter_graph_create_filter(&pBufferSrcContext, pBufferSrc, "in", szArgs, NULL, pFilterGraph)) < 0)
 {
  sprintf(szErrMsg, "Cannot create buffer source filter, error:%s\n", av_err2str(ret));
  avfilter_inout_free(&pFilterIn);
  avfilter_inout_free(&pFilterOut);
  avfilter_graph_free(&pFilterGraph);
  return;
 }
 pBufferSinkParams = av_buffersink_params_alloc();
 pBufferSinkParams->pixel_fmts = pix_fmts;
 if ((ret = avfilter_graph_create_filter(&pBufferSinkContext, pBufferSink, "out", NULL, pBufferSinkParams, pFilterGraph)) < 0)
 {
  sprintf(szErrMsg, "Cannot create buffer sink filter, error:%s\n", av_err2str(ret));
  av_free(pBufferSinkParams);
  avfilter_inout_free(&pFilterIn);
  avfilter_inout_free(&pFilterOut);
  avfilter_graph_free(&pFilterGraph);
  return;
 }

 //endpoints for avfilter_graph_parse_ptr(): the "outputs" entry describes the output
 //pad of our buffer source, which feeds the parsed graph's input labelled "in"
 pFilterOut->name = av_strdup("in");
 pFilterOut->filter_ctx = pBufferSrcContext;
 pFilterOut->pad_idx = 0;
 pFilterOut->next = NULL;

 //the "inputs" entry describes the input pad of our buffer sink, which consumes the
 //parsed graph's output labelled "out"
 pFilterIn->name = av_strdup("out");
 pFilterIn->filter_ctx = pBufferSinkContext;
 pFilterIn->pad_idx = 0;
 pFilterIn->next = NULL;

 do
 {
  if ((ret = avfilter_graph_parse_ptr(pFilterGraph, szFilterDescr, &pFilterIn, &pFilterOut, NULL)) < 0)
  {
   sprintf(szErrMsg, "Cannot parse the filter graph description, error:%s\n", av_err2str(ret));
   break;
  }

  if ((ret = avfilter_graph_config(pFilterGraph, NULL)) < 0)
  {
   sprintf(szErrMsg, "Cannot configure the filter graph, error:%s\n", av_err2str(ret));
   break;
  }
  
  if ((ret = av_buffersrc_add_frame(pBufferSrcContext, pSrcFrame)) < 0)
  {
   sprintf(szErrMsg, "Cannot add frame to the buffer source, error:%s\n", av_err2str(ret));
   break;
  }
  //the width and height of pDestFrame must be set beforehand
  //once the frame has been pushed through the filter (av_buffersrc_add_frame takes ownership), the data pointers in pSrcFrame are set to NULL and pSrcFrame can no longer be used
  if ((ret = av_buffersink_get_frame(pBufferSinkContext, pDestFrame)) < 0)
  {
   sprintf(szErrMsg, "Cannot get frame from the buffer sink, error:%s\n", av_err2str(ret));
   break;
  }
 } while (0);

 avfilter_inout_free(&pFilterIn);
 avfilter_inout_free(&pFilterOut);
 av_free(pBufferSinkParams);
 avfilter_graph_free(&pFilterGraph);
}
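
A minimal caller sketch (the helper name, the VideoDecodec pointer and the box coordinates below are hypothetical; the decoded frame is assumed to come from the decoder, e.g. from avcodec_receive_frame):

/*
Draw a 100x100 box at (50,50) on a decoded frame and return the filtered frame
*/
AVFrame* DrawBoxOnDecodedFrame(VideoDecodec* pDecoder, AVFrame** ppDecodedFrame)
{
 AVFrame* pBoxedFrame = av_frame_alloc();
 if (pBoxedFrame == NULL)
  return NULL;

 pDecoder->DrawRectangleToFrame(*ppDecodedFrame, pBoxedFrame, 50, 50, 100, 100);

 //av_buffersrc_add_frame() inside DrawRectangleToFrame took ownership of the source
 //frame's data, so the source frame is no longer usable; release its shell here
 av_frame_free(ppDecodedFrame);

 //the caller owns pBoxedFrame and must release it with av_frame_free() when done
 return pBoxedFrame;
}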


Notes

1. A call to avfilter_get_by_name("ffbuffersink") must be changed to avfilter_get_by_name("buffersink") on newer FFmpeg (3.4); otherwise the returned pointer is NULL and avfilter_graph_create_filter returns -12.
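
A defensive lookup along these lines avoids passing a NULL filter into avfilter_graph_create_filter in the first place (a sketch; the fallback to the legacy "ffbuffersink" name only matters on very old FFmpeg builds):

 const AVFilter* pBufferSink = avfilter_get_by_name("buffersink");
 if (pBufferSink == NULL)
  pBufferSink = avfilter_get_by_name("ffbuffersink");   //legacy name on old FFmpeg
 if (pBufferSink == NULL)
 {
  //neither name exists in this build; report the error instead of letting
  //avfilter_graph_create_filter fail later with -12 (AVERROR(ENOMEM))
  return;
 }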

