PS: This program does not include any video transcoding functionality; the streams are pushed as-is, without re-encoding.
The role of an RTMP streamer in a streaming media system is as follows: the streamer first sends the video data over RTMP to a streaming media server (Server, e.g. FMS, Red5, or Wowza), and clients (typically Flash Player) then connect to the server to watch the live stream.
Before running this program, an RTMP streaming server must already be running, with the corresponding Application created on it. Operating the streaming server is outside the scope of this article and is not covered here. Once the program is running, the pushed live stream can be watched with an RTMP client (for example Flash Player, FFplay, and so on).
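For example, with the output address used in the code below, running ffplay rtmp://192.168.3.189:1935/oflaDemo/hkfystream should start playing the pushed stream (replace the address with whatever matches your own server and Application).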
A delay is needed when sending the stream data. Otherwise FFmpeg would process the data very quickly and push everything out almost instantly, which the streaming server cannot accept. The data therefore has to be sent at the video's actual frame rate. The streamer described here sleeps between frames with av_usleep() to delay sending, so that packets go out at the video's frame rate. The reference code is shown below.
// Frame delay: pace sending by the video frame rate
if (pkt.stream_index == videoindex)
{
AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
AVRational time_base_q = { 1, AV_TIME_BASE };
int64_t pts_time = av_rescale_q(pkt.pts, time_base, time_base_q); // presentation time in microseconds (AV_TIME_BASE units)
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
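Some inputs (a raw H.264 elementary stream, for example) carry no timestamps at all, so av_read_frame() returns packets whose pts is AV_NOPTS_VALUE. In that case the streamer generates the PTS, DTS, and duration itself from the stream's frame rate, as in the following snippet.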
// Set PTS
if (pkt.pts == AV_NOPTS_VALUE)
{
// Duration between two frames of the original video, in AV_TIME_BASE (microsecond) units
int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
// Convert to values in the stream's time_base
AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts = pkt.pts;
pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
}
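As a quick sanity check of the arithmetic (assuming, for example, a 25 fps input whose stream time_base is 1/1000, as is typical for FLV): calc_duration = 1000000 / 25 = 40000 microseconds, and pkt.duration = 40000 / (0.001 * 1000000) = 40, i.e. each frame advances the timestamp by 40 units (40 ms) in the stream's time_base.
The complete source code of the streamer is listed below. Assuming the FFmpeg development headers and libraries are installed, it should build with something along the lines of g++ streamer.cpp -o streamer $(pkg-config --cflags --libs libavformat libavcodec libavutil) (the source file name here is only an example).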
#include <stdio.h>
#define __STDC_CONSTANT_MACROS
#ifdef _WIN32
//Windows
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>
#include <libavutil/time.h>
#ifdef __cplusplus
};
#endif
#endif
int main(int argc, char* argv[])
{
AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL; // one AVFormatContext for the input, another for the output
AVOutputFormat *ofmt = NULL;
AVPacket pkt;
const char *in_filename, *out_filename;
int ret, i;
int videoindex = -1;
int frame_index = 0;
int64_t start_time = 0;
in_filename = "cuc_ieschool.flv"; // input URL (local file)
out_filename = "rtmp://192.168.3.189:1935/oflaDemo/hkfystream"; // output URL (RTMP)
//【1】Register all available formats and codecs
av_register_all();
//Network
avformat_network_init();
//【2】Open the input file and find the index of the video stream
if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0)
{
printf("Could not open input file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0)
{
printf("Failed to retrieve input stream information");
goto end;
}
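// Find the index of the first video stream in the input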
for (i = 0; i < ifmt_ctx->nb_streams; i++)
{
if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
}
// Dump information about the input format
av_dump_format(ifmt_ctx, 0, in_filename, 0);
//【3】Allocate the output format context for the RTMP stream (FLV muxer)
avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename); //RTMP
if (!ofmt_ctx)
{
printf("Could not create output context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
ofmt = ofmt_ctx->oformat;
for (i = 0; i < ifmt_ctx->nb_streams; i++)
{
// Create an output stream for each input stream
AVStream *in_stream = ifmt_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
if (!out_stream)
{
printf("Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
// Copy the AVCodecContext settings from the input stream
ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
if (ret < 0)
{
printf("Failed to copy context from input to output stream codec context\n");
goto end;
}
// Let the muxer pick the codec tag, and use global headers if the output format requires them
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
// Dump information about the output format
av_dump_format(ofmt_ctx, 0, out_filename, 1);
//【4】Open the output URL
if (!(ofmt->flags & AVFMT_NOFILE))
{
ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
if (ret < 0)
{
printf("Could not open output URL '%s'", out_filename);
goto end;
}
}
// Write the file header
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0)
{
printf("Error occurred when opening output URL\n");
goto end;
}
//【5】Read audio/video packets from the input and push them to the output
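// Record the wall-clock time when streaming starts; used below to pace packet sending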
start_time = av_gettime();
while (1)
{
AVStream *in_stream, *out_stream;
// Read one AVPacket from the input
ret = av_read_frame(ifmt_ctx, &pkt);
if (ret < 0)
break;
// Set PTS/DTS when the input packet carries no timestamps
if (pkt.pts == AV_NOPTS_VALUE)
{
// Duration between two frames of the original video, in AV_TIME_BASE (microsecond) units
int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
// Convert to values in the stream's time_base
AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts = pkt.pts;
pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
}
// Frame delay: pace sending by the video frame rate
if (pkt.stream_index == videoindex)
{
AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
AVRational time_base_q = { 1, AV_TIME_BASE };
int64_t pts_time = av_rescale_q(pkt.pts, time_base, time_base_q); // presentation time in microseconds
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
in_stream = ifmt_ctx->streams[pkt.stream_index];
out_stream = ofmt_ctx->streams[pkt.stream_index];
// Rescale PTS/DTS/duration to the output stream's time_base
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
//Print to Screen
if (pkt.stream_index == videoindex)
{
printf("Send %8d video frames to output URL\n", frame_index);
frame_index++;
}
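// Write the packet to the output; the muxer interleaves packets by DTS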
ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
if (ret < 0)
{
printf("Error muxing packet\n");
break;
}
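// Release the packet before reading the next one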
av_free_packet(&pkt);
}
//【6】Write the file trailer
av_write_trailer(ofmt_ctx);
end:
avformat_close_input(&ifmt_ctx);
//【7】Close the output and free the contexts
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
if (ret < 0 && ret != AVERROR_EOF)
{
printf("Error occurred.\n");
return -1;
}
return 0;
}
Once the program is running, the pushed live stream can be played with a web-based player.
For example, the screenshot below shows the live stream being played with the videoPlayer sample from Flash Media Server's Samples folder. (Stream URL: rtmp://192.168.3.189/oflaDemo/hkfystream)
Download: http://download.csdn.net/detail/aoshilang2249/8538753
Reference: http://blog.csdn.net/leixiaohua1020/article/category/1362941
Original article: http://blog.csdn.net/aoshilang2249/article/details/44674971