Background
The simplest FFmpeg-based streamer, using RTMP push as the example: it remuxes a local MP4 file into FLV and sends it to an RTMP server in real time. The code has been updated for the FFmpeg 4.4 API.
Code
#include <iostream>
#include <cstdint>
#include <windows.h>
extern "C"
{
#include "libavformat/avformat.h"
// timing helpers: av_gettime() and av_usleep()
#include "libavutil/time.h"
}
// link the FFmpeg import libraries (MSVC-specific)
#pragma comment(lib,"avformat.lib")
// utility library: error strings, timing, rational math, etc.
#pragma comment(lib,"avutil.lib")
// codec library
#pragma comment(lib,"avcodec.lib")
int avError(int errNum)
{
char szErrBuf[1024] = { 0 };
av_strerror(errNum, szErrBuf, sizeof(szErrBuf));
std::cout << "failed info:" << szErrBuf << std::endl;
return -1;
}
int main()
{
int nVideoIndex = -1;
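// networking must be initialized before any RTMP/HTTP I/O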
avformat_network_init();
const char* pszFile = "D:/hls/1.mp4";
const char* pszRTMPURL = "rtmp://192.168.11.172:1935/hls/home";
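// 1. open the input file and read its stream information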
AVFormatContext* pInputAVFormatContext = NULL;
int nRet = avformat_open_input(&pInputAVFormatContext, pszFile, NULL, NULL);
if (nRet < 0)
{
return avError(nRet);
}
nRet = avformat_find_stream_info(pInputAVFormatContext, NULL);
if (nRet < 0)
{
return avError(nRet);
}
av_dump_format(pInputAVFormatContext, 0, pszFile, 0);
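// 2. allocate the output muxer context; RTMP carries an FLV bitstream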
AVFormatContext* pOutputAVFormatContext = NULL;
nRet = avformat_alloc_output_context2(&pOutputAVFormatContext, NULL, "flv", pszRTMPURL);
if (nRet < 0)
{
return avError(nRet);
}
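// 3. create one output stream per input stream and copy the codec parameters (pure remux, no transcoding)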
for (unsigned int i = 0; i < pInputAVFormatContext->nb_streams; i++)
{
AVStream* pInputAVStream = pInputAVFormatContext->streams[i];
AVStream* pOutputAVStream = avformat_new_stream(pOutputAVFormatContext, NULL);
if (!pOutputAVStream)
{
return avError(AVERROR(ENOMEM));
}
nRet = avcodec_parameters_copy(pOutputAVStream->codecpar, pInputAVStream->codecpar);
if (nRet < 0)
{
return avError(nRet);
}
// clear the tag so the FLV muxer picks a valid one itself
pOutputAVStream->codecpar->codec_tag = 0;
}
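// find the video stream; its timestamps drive the real-time pacing in the send loop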
for (unsigned int i = 0; i < pInputAVFormatContext->nb_streams; i++)
{
if (pInputAVFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
nVideoIndex = i;
break;
}
}
av_dump_format(pOutputAVFormatContext, 0, pszRTMPURL, 1);
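// 4. the FLV muxer is not AVFMT_NOFILE, so the RTMP connection must be opened explicitly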
nRet = avio_open(&pOutputAVFormatContext->pb, pszRTMPURL, AVIO_FLAG_WRITE);
if (nRet < 0)
{
return avError(nRet);
}
nRet = avformat_write_header(pOutputAVFormatContext, NULL);
if (nRet < 0)
{
return avError(nRet);
}
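// 5. send loop: read a packet, patch its timestamps, pace to the wall clock, write it out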
AVPacket pkt;
std::int64_t llStartTime = av_gettime();
std::int64_t llFrameIndex = 0;
while (true)
{
AVStream* pInputStream = NULL;
AVStream* pOutputStream = NULL;
nRet = av_read_frame(pInputAVFormatContext, &pkt);
if (nRet < 0)
{
break;
}
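// some inputs carry no timestamps (a raw H.264 elementary stream, for example); synthesize video pts/dts from the nominal frame rate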
if (pkt.pts == AV_NOPTS_VALUE && pkt.stream_index == nVideoIndex)
{
AVRational timeBase = pInputAVFormatContext->streams[nVideoIndex]->time_base;
double dCalcDuration = (double)AV_TIME_BASE / av_q2d(pInputAVFormatContext->streams[nVideoIndex]->r_frame_rate);
pkt.pts = (std::int64_t)(llFrameIndex * dCalcDuration / (av_q2d(timeBase) * AV_TIME_BASE));
pkt.dts = pkt.pts;
pkt.duration = (std::int64_t)(dCalcDuration / (av_q2d(timeBase) * AV_TIME_BASE));
}
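// pace the push: if the stream is ahead of the wall clock, sleep the difference so the server receives data in real time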
if (pkt.stream_index == nVideoIndex)
{
AVRational timeBase = pInputAVFormatContext->streams[nVideoIndex]->time_base;
AVRational timeBaseQ = { 1, AV_TIME_BASE };
std::int64_t pts_time = av_rescale_q(pkt.dts, timeBase, timeBaseQ);
std::int64_t now_time = av_gettime() - llStartTime;
if (pts_time > now_time)
{
av_usleep((unsigned int)(pts_time - now_time));
}
}
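// rescale pts/dts/duration from the input stream's time_base to the output stream's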
pInputStream = pInputAVFormatContext->streams[pkt.stream_index];
pOutputStream = pOutputAVFormatContext->streams[pkt.stream_index];
pkt.pts = av_rescale_q_rnd(pkt.pts, pInputStream->time_base, pOutputStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, pInputStream->time_base, pOutputStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, pInputStream->time_base, pOutputStream->time_base);
pkt.pos = -1;
if (pkt.stream_index == nVideoIndex)
{
llFrameIndex++;
}
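// interleaved write buffers packets as needed to keep dts monotonically increasing across streams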
nRet = av_interleaved_write_frame(pOutputAVFormatContext, &pkt);
if (nRet < 0)
{
std::cout << "error sending packet" << std::endl;
break;
}
av_packet_unref(&pkt);
}
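// flush the muxer, write the FLV trailer, and release everything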
av_write_trailer(pOutputAVFormatContext);
if (!(pOutputAVFormatContext->oformat->flags & AVFMT_NOFILE))
{
avio_close(pOutputAVFormatContext->pb);
}
avformat_free_context(pOutputAVFormatContext);
avformat_close_input(&pInputAVFormatContext);
return 0;
}
Notes on the PTS/DTS adjustment
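The send loop touches timestamps in three places. First, if the input carries no pts at all (a raw H.264 elementary stream, for example), video pts/dts/duration are synthesized from r_frame_rate. Second, the video dts is converted to microseconds ({1, AV_TIME_BASE}) and compared against the wall clock, and the loop sleeps the difference so the file is pushed at its native rate instead of as fast as the disk allows. Third, every packet's pts/dts/duration are rescaled from the input stream's time_base to the output stream's, because the MP4 demuxer and the FLV muxer generally count time in different units. The rescale is plain rational arithmetic: av_rescale_q(a, bq, cq) returns a * bq / cq, rounded. Below is a minimal standalone sketch of that step, assuming a 1/12800 input time_base (common for MP4 video); RescaleExample and the tick value 512 are made up for illustration, while 1/1000 is the time_base the FLV muxer actually uses:
#include <cstdint>
extern "C"
{
#include "libavutil/mathematics.h"
}
std::int64_t RescaleExample()
{
AVRational inTimeBase = { 1, 12800 };  // assumed input stream time_base
AVRational outTimeBase = { 1, 1000 };  // FLV time_base: milliseconds
// av_rescale_q(a, bq, cq) computes a * bq / cq with rounding:
// 512 * (1/12800) / (1/1000) = 40, so input tick 512 becomes 40 ms
return av_rescale_q(512, inTimeBase, outTimeBase);
}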
For a fuller discussion, see: 基于FFmpeg进行RTMP推流(二) - 简书