Final decoding result:
1. UI design
2. Enter the default values in the control property window
3. Copy the compiled FFmpeg libraries into a directory at the same level as the project
4. Reference the FFmpeg libraries and header files in the project
5. Link the required FFmpeg libraries (a sample .pro fragment is shown after this list)
6. Use the FFmpeg library
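Steps 4 and 5 are done in the Qt project (.pro) file. The following is only a minimal qmake sketch; the directory name ffmpeg/ and the exact module list are assumptions, so adjust the paths and libraries to match the build you copied in step 3:

# hypothetical fragment of the .pro file
INCLUDEPATH += $$PWD/ffmpeg/include

LIBS += -L$$PWD/ffmpeg/lib \
        -lavformat -lavcodec -lavdevice \
        -lavutil -lswscale -lswresample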
Include the header files:
extern "C"
{
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavcodec/avcodec.h"
#include "libavcodec/bsf.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
#include "libavutil/log.h"
#include "libavutil/imgutils.h"
#include "libavutil/time.h"
#include "libswresample/swresample.h"
}
Create the video codec manager class
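The header file itself is not reproduced here; the sketch below reconstructs ffmpegmananger.h from the members, methods and the Sig_GetOneFrame signal used by the implementation that follows, so treat it as an approximation rather than the exact original declaration:

// ffmpegmananger.h -- reconstructed sketch, inferred from the .cpp below
#ifndef FFMPEGMANANGER_H
#define FFMPEGMANANGER_H

#include <QObject>
#include <QImage>
#include <QString>

extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
}

class ffmpegMananger : public QObject
{
    Q_OBJECT
public:
    explicit ffmpegMananger(QObject *parent = nullptr);
    ~ffmpegMananger();

    void getRtspURL(QString strRtspURL);   // store the RTSP input URL
    void getOutURL(QString strRute);       // store the output file path
    int  ffmepgInput();                    // pull, decode, re-encode and mux
    void writeTail();                      // write the output file trailer

signals:
    void Sig_GetOneFrame(QImage image);    // emitted for every decoded frame

private:
    void setOutputCtx(AVCodecContext *encCtx, AVFormatContext **pTsFmtCtx, int &nVideoIdx_out);
    void setDecoderPts(int idx, int count, AVFrame *pFrame);
    void setEncoderPts(int nVideo_indx, int frame_index, int videoindex_out, AVPacket *newpkt);
    void openEncoder(int width, int height, AVCodecContext **enc_ctx);

    AVFormatContext *m_pInFmtCtx;          // input (RTSP) format context
    AVFormatContext *m_pTsFmtCtx;          // output (TS) format context
    QString m_qstrRtspURL;
    QString m_qstrOutPutFile;
};

#endif // FFMPEGMANANGER_H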
Implement the video codec manager class:
#include "ffmpegmananger.h"
#include <QThread>
ffmpegMananger::ffmpegMananger(QObject *parent) : QObject(parent)
{
    m_pInFmtCtx = nullptr;
    m_pTsFmtCtx = nullptr;
    m_qstrRtspURL = "";
    m_qstrOutPutFile = "";
}
ffmpegMananger::~ffmpegMananger()
{
    // the input context was opened with avformat_open_input, so close it properly
    avformat_close_input(&m_pInFmtCtx);
    avformat_free_context(m_pTsFmtCtx);
}

void ffmpegMananger::getRtspURL(QString strRtspURL)
{
    this->m_qstrRtspURL = strRtspURL;
}
void ffmpegMananger::getOutURL(QString strRute)
{
    this->m_qstrOutPutFile = strRute;
    printf("===========%s\n", m_qstrOutPutFile.toStdString().c_str());
}
void ffmpegMananger::setOutputCtx(AVCodecContext *encCtx, AVFormatContext **pTsFmtCtx, int &nVideoIdx_out)
{
    avformat_alloc_output_context2(pTsFmtCtx, nullptr, nullptr, m_qstrOutPutFile.toStdString().c_str());
    if (!*pTsFmtCtx)
    {
        printf("Could not create output context\n");
        return;
    }
    if (avio_open(&((*pTsFmtCtx)->pb), m_qstrOutPutFile.toStdString().c_str(), AVIO_FLAG_READ_WRITE) < 0)
    {
        avformat_free_context(*pTsFmtCtx);
        printf("avio_open fail.");
        return;
    }
    AVStream *out_stream = avformat_new_stream(*pTsFmtCtx, encCtx->codec);
    nVideoIdx_out = out_stream->index;
    avcodec_parameters_from_context(out_stream->codecpar, encCtx);
    printf("==========Output Information==========\n");
    av_dump_format(*pTsFmtCtx, 0, m_qstrOutPutFile.toStdString().c_str(), 1);
    printf("======================================\n");
}
int ffmpegMananger::ffmepgInput()
{
    int nRet = 0;
    AVCodecContext *encCtx = nullptr;   // encoder context
    //const char *pUrl = "D:/videos/264.dat";
    std::string temp = m_qstrRtspURL.toStdString();
    const char *pUrl = temp.c_str();
    printf("===========%s\n", pUrl);

    AVDictionary *options = nullptr;
    av_dict_set(&options, "rtsp_transport", "tcp", 0);
    av_dict_set(&options, "stimeout", "10000000", 0);
    // set the "buffer_size" cache size
    av_dict_set(&options, "buffer_size", "1024000", 0);

    nRet = avformat_open_input(&m_pInFmtCtx, pUrl, nullptr, &options);
    if (nRet < 0)
    {
        printf("Could not open input file,===========keep trying \n");
        return nRet;
    }
    avformat_find_stream_info(m_pInFmtCtx, nullptr);
    printf("===========Input Information==========\n");
    av_dump_format(m_pInFmtCtx, 0, pUrl, 0);
    printf("======================================\n");

    // 1. find the video stream index
    int nVideo_indx = av_find_best_stream(m_pInFmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
    if (nVideo_indx < 0)
    {
        avformat_close_input(&m_pInFmtCtx);
        printf("no video stream found\n");
        return -1;
    }
    // 2. find the decoder
    const AVCodec *pInCodec = avcodec_find_decoder(m_pInFmtCtx->streams[nVideo_indx]->codecpar->codec_id);
    if (nullptr == pInCodec)
    {
        printf("avcodec_find_decoder fail.");
        return -1;
    }
    // allocate the decoder context
    AVCodecContext *pInCodecCtx = avcodec_alloc_context3(pInCodec);
    // copy the stream parameters into the decoder context
    nRet = avcodec_parameters_to_context(pInCodecCtx, m_pInFmtCtx->streams[nVideo_indx]->codecpar);
    if (nRet < 0)
    {
        avcodec_free_context(&pInCodecCtx);
        printf("avcodec_parameters_to_context fail.");
        return -1;
    }
    // open the decoder
    if (avcodec_open2(pInCodecCtx, pInCodec, nullptr) < 0)
    {
        avcodec_free_context(&pInCodecCtx);
        printf("Error: Can't open codec!\n");
        return -1;
    }
    printf("width = %d\n", pInCodecCtx->width);
    printf("height = %d\n", pInCodecCtx->height);

    int frame_index = 0;
    int got_picture = 0;
    AVFrame  *pFrame = av_frame_alloc();
    AVPacket *newpkt = av_packet_alloc();
    AVPacket *packet = av_packet_alloc();
    // allocate the RGB frame used for display
    AVFrame *pFrameRGB = av_frame_alloc();
    // colour-space conversion / scaling / filtering context
    SwsContext *m_SwsContext = sws_getContext(pInCodecCtx->width, pInCodecCtx->height, pInCodecCtx->pix_fmt,
                                              pInCodecCtx->width, pInCodecCtx->height, AV_PIX_FMT_RGB32,
                                              SWS_BICUBIC, nullptr, nullptr, nullptr);
    int bytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, pInCodecCtx->width, pInCodecCtx->height, 4);
    uint8_t *m_OutBuffer = (uint8_t *)av_malloc(bytes * sizeof(uint8_t));
    // attach the allocated buffer to pFrameRGB
    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, m_OutBuffer,
                         AV_PIX_FMT_RGB32, pInCodecCtx->width, pInCodecCtx->height, 4);

    if (encCtx == nullptr)
    {
        // open the encoder
        openEncoder(pInCodecCtx->width, pInCodecCtx->height, &encCtx);
    }
    int videoindex_out = 0;
    // set up the output format context
    setOutputCtx(encCtx, &m_pTsFmtCtx, videoindex_out);
    // write the file header
    if (avformat_write_header(m_pTsFmtCtx, nullptr) < 0)
    {
        avformat_free_context(m_pTsFmtCtx);
        m_pTsFmtCtx = nullptr;
        printf("Error occurred when opening output file\n");
        return -1;
    }
    printf("==============writer trail===================.\n");

    int count = 0;
    nRet = 0;
    while (av_read_frame(m_pInFmtCtx, packet) >= 0)        // read one H.264 packet from the input
    {
        if (packet->stream_index != nVideo_indx)           // keep video packets only
        {
            av_packet_unref(packet);
            continue;
        }
        if (avcodec_send_packet(pInCodecCtx, packet) < 0)  // feed the packet to the decoder
        {
            break;
        }
        av_packet_unref(packet);
        got_picture = avcodec_receive_frame(pInCodecCtx, pFrame);   // fetch one decoded YUV frame
        if (0 == got_picture)   // one frame decoded
        {
            // scale / convert the decoded frame to RGB32
            sws_scale(m_SwsContext, (uint8_t const * const *)pFrame->data, pFrame->linesize,
                      0, pInCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
            // wrap the buffer in a QImage and emit it for display
            QImage tmmImage((uchar *)m_OutBuffer, pInCodecCtx->width, pInCodecCtx->height, QImage::Format_RGB32);
            QImage image = tmmImage.copy();
            emit Sig_GetOneFrame(image);

            setDecoderPts(nVideo_indx, count, pFrame);
            count++;
            // feed the raw frame to the encoder
            nRet = avcodec_send_frame(encCtx, pFrame);
            if (nRet < 0)
            {
                continue;
            }
            // fetch the encoded packets from the encoder
            while (nRet >= 0)
            {
                nRet = avcodec_receive_packet(encCtx, newpkt);
                if (nRet < 0)
                {
                    break;
                }
                setEncoderPts(nVideo_indx, frame_index, videoindex_out, newpkt);
                printf("Write Packet. size:%5d\tpts:%lld\n", newpkt->size, newpkt->pts);
                if (av_interleaved_write_frame(m_pTsFmtCtx, newpkt) < 0)
                {
                    printf("Error muxing packet\n");
                    goto end;
                }
                av_packet_unref(newpkt);
            }
        }
    }

    // flush the decoder, then the encoder, so no buffered frames are lost
    avcodec_send_packet(pInCodecCtx, nullptr);
    while (avcodec_receive_frame(pInCodecCtx, pFrame) == 0)
    {
        setDecoderPts(nVideo_indx, count, pFrame);
        count++;
        if (avcodec_send_frame(encCtx, pFrame) < 0)
        {
            break;
        }
        while (avcodec_receive_packet(encCtx, newpkt) == 0)
        {
            setEncoderPts(nVideo_indx, frame_index, videoindex_out, newpkt);
            if (av_interleaved_write_frame(m_pTsFmtCtx, newpkt) < 0)
            {
                printf("Error muxing packet\n");
                goto end;
            }
            av_packet_unref(newpkt);
        }
    }
    avcodec_send_frame(encCtx, nullptr);
    while (avcodec_receive_packet(encCtx, newpkt) == 0)
    {
        setEncoderPts(nVideo_indx, frame_index, videoindex_out, newpkt);
        if (av_interleaved_write_frame(m_pTsFmtCtx, newpkt) < 0)
        {
            printf("Error muxing packet\n");
            goto end;
        }
        av_packet_unref(newpkt);
    }

    // write the file trailer
    av_write_trailer(m_pTsFmtCtx);
end:
    av_frame_free(&pFrame);
    av_frame_free(&pFrameRGB);
    av_packet_free(&newpkt);
    av_packet_free(&packet);
    std::cout << "rtsp's h264 to ts end";
    return 0;
}
void ffmpegMananger::setDecoderPts(int idx, int count, AVFrame *pFrame)
{
    AVStream *in_stream = m_pInFmtCtx->streams[idx];
    AVRational in_time_base1 = in_stream->time_base;
    // duration between two frames (us)
    int64_t in_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
    pFrame->pts = (double)(count * in_duration) / (double)(av_q2d(in_time_base1) * AV_TIME_BASE);
}
void ffmpegMananger::setEncoderPts(int nVideo_indx, int frame_index, int videoindex_out, AVPacket *newpkt)
{
    AVStream *in_stream  = m_pInFmtCtx->streams[newpkt->stream_index];
    AVStream *out_stream = m_pTsFmtCtx->streams[videoindex_out];
    if (newpkt->stream_index == nVideo_indx)
    {
        // FIX: no PTS (example: raw H.264) -- write a simple PTS
        if (newpkt->pts == AV_NOPTS_VALUE)
        {
            AVRational time_base1 = in_stream->time_base;
            // duration between two frames (us)
            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
            newpkt->pts = (double)(frame_index * calc_duration) / (double)(av_q2d(time_base1) * AV_TIME_BASE);
            newpkt->dts = newpkt->pts;
            newpkt->duration = (double)calc_duration / (double)(av_q2d(time_base1) * AV_TIME_BASE);
            frame_index++;
        }
    }
    // convert PTS/DTS from the input time base to the output time base
    newpkt->pts = av_rescale_q_rnd(newpkt->pts, in_stream->time_base, out_stream->time_base,
                                   (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
    newpkt->dts = av_rescale_q_rnd(newpkt->dts, in_stream->time_base, out_stream->time_base,
                                   (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
    newpkt->duration = av_rescale_q(newpkt->duration, in_stream->time_base, out_stream->time_base);
    newpkt->pos = -1;
    newpkt->stream_index = videoindex_out;
}
void ffmpegMananger::writeTail()
{
    // write the file trailer
    av_write_trailer(m_pTsFmtCtx);
}
void ffmpegMananger::openEncoder(int width, int height, AVCodecContext **enc_ctx)
{
    // use the libx264 encoder
    const AVCodec *pCodec = avcodec_find_encoder_by_name("libx264");
    if (nullptr == pCodec)
    {
        printf("avcodec_find_encoder_by_name fail.\n");
        return;
    }
    // allocate the encoder context
    *enc_ctx = avcodec_alloc_context3(pCodec);
    if (nullptr == *enc_ctx)
    {
        printf("avcodec_alloc_context3(pCodec) fail.\n");
        return;
    }
    // SPS/PPS
    (*enc_ctx)->profile = FF_PROFILE_H264_MAIN;
    (*enc_ctx)->level = 30;                   // H.264 level 3.0
    // resolution
    (*enc_ctx)->width = width;
    (*enc_ctx)->height = height;
    // GOP
    (*enc_ctx)->gop_size = 25;                // I-frame interval
    (*enc_ctx)->keyint_min = 20;              // minimum interval for auto-inserted I-frames, OPTION
    // B-frames
    (*enc_ctx)->max_b_frames = 0;             // no B-frames
    (*enc_ctx)->has_b_frames = 0;
    // reference frames
    (*enc_ctx)->refs = 3;                     // OPTION
    // input pixel format
    (*enc_ctx)->pix_fmt = AV_PIX_FMT_YUV420P;
    // bit rate
    (*enc_ctx)->bit_rate = 3000000;
    // time base: interval between frames
    (*enc_ctx)->time_base.num = 1;
    (*enc_ctx)->time_base.den = 25;
    // frame rate: 25 fps
    (*enc_ctx)->framerate.num = 25;
    (*enc_ctx)->framerate.den = 1;
    if (avcodec_open2((*enc_ctx), pCodec, nullptr) < 0)
    {
        printf("avcodec_open2 fail.\n");
    }
    return;
}
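One way to drive the class (an illustrative sketch only; the widget names ui->lineEditRtsp, ui->lineEditOut and ui->labelVideo are assumptions, not from the original) is to move it onto a QThread and connect Sig_GetOneFrame to a slot that paints the decoded QImage:

// hypothetical usage inside the main window
ffmpegMananger *pManager = new ffmpegMananger();
QThread *pWorker = new QThread(this);
pManager->moveToThread(pWorker);

pManager->getRtspURL(ui->lineEditRtsp->text());   // RTSP address entered in the UI
pManager->getOutURL(ui->lineEditOut->text());     // output .ts path entered in the UI

// repaint the label whenever a frame has been decoded
connect(pManager, &ffmpegMananger::Sig_GetOneFrame, this, [this](QImage image) {
    ui->labelVideo->setPixmap(QPixmap::fromImage(image));
});
// start pulling and decoding once the worker thread is running
connect(pWorker, &QThread::started, pManager, &ffmpegMananger::ffmepgInput);
pWorker->start();

Because the frames are emitted from the worker thread, the signal/slot connection is queued automatically, so the QImage is handed to the GUI thread safely while the decode loop keeps running.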