Looking back, this push-streaming project has been in development for more than four years. The very first version only offered file-based on-demand playback, implemented with plain QTcpSocket communication, and was fairly basic. Because file on-demand could only push from files, not from network streams or local capture devices, that bottleneck had to be broken, and the core network push feature was added later. It is the heart of the project: besides working with all kinds of streaming media servers, the push side supports files, network audio/video streams, local device capture and local desktop capture, with configurable parameters and synchronized audio/video pushing, and it has been iterated on and refined ever since. These capabilities were not there from the start; they grew out of the video player component's save-to-file feature as it matured, because pushing is essentially the same as saving: the output simply goes to a push URL instead of a file, with the container format switched accordingly, so the push and save paths share the same code entirely. The push component as a whole manages a collection of save-class instances, queries each one for the current push state, whether audio and video are present, and whether they are being re-encoded, and shows these statuses in a table.
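Since pushing and saving share the same code path, the only difference at the FFmpeg level is the output target and the forced container format. Below is a minimal sketch of that idea using the raw FFmpeg muxing API rather than the project's own FFmpegSave class; the openOutput helper name and the example URLs are purely illustrative, and error handling plus stream setup are omitted.

//minimal sketch: the same muxing path serves both "save to file" and "push to server",
//only the url and the container format differ (stream setup and error handling omitted)
extern "C" {
#include <libavformat/avformat.h>
}

static AVFormatContext *openOutput(const char *url, bool isPush)
{
    AVFormatContext *outCtx = NULL;
    //for a local mp4 the format is guessed from the file suffix;
    //for an rtmp push the flv muxer is forced because the url has no suffix
    const char *format = isPush ? "flv" : NULL;
    if (avformat_alloc_output_context2(&outCtx, NULL, format, url) < 0) {
        return NULL;
    }
    //network targets need avio_open just like plain files do
    if (!(outCtx->oformat->flags & AVFMT_NOFILE)) {
        if (avio_open(&outCtx->pb, url, AVIO_FLAG_WRITE) < 0) {
            avformat_free_context(outCtx);
            return NULL;
        }
    }
    //from here on, adding streams, avformat_write_header and
    //av_interleaved_write_frame are identical for both cases
    return outCtx;
}

//openOutput("d:/video/ch1.mp4", false);            //save to file
//openOutput("rtmp://127.0.0.1/live/ch1", true);    //push to a media server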
There was no web preview at first either. Users asked for it strongly: after pushing, they want a simple way to preview the streams directly in a web page, one preview per channel, so they can tell at a glance whether each push succeeded, instead of opening a player and typing in every playback address by hand, which is tedious. The main use case for pushing is, after all, to let a web page or a mobile app pull and display the stream. Previewing in the browser also makes it easy to judge real-time behavior; users are especially sensitive to two metrics, latency and smoothness, so this part has been polished continuously to get as close to the ideal as possible.
2. Screenshots



void NetPushClient::record()
{
    if (ffmpegSave) {
        //take the stream key from the push url
        QString flag = pushUrl.split("/").last();
        //the file name must not contain special characters or reserved device names/replace them with a fixed letter
        QString pattern("[\\\\/:|*?\"<>]|[cC][oO][mM][1-9]|[lL][pP][tT][1-9]|[cC][oO][nN]|[pP][rR][nN]|[aA][uU][xX]|[nN][uU][lL]");
#if (QT_VERSION >= QT_VERSION_CHECK(6,0,0))
        QRegularExpression rx(pattern);
#else
        QRegExp rx(pattern);
#endif
        flag.replace(rx, "X");

        //append the current time to the file name
        QString path = QString("%1/video/%2").arg(qApp->applicationDirPath()).arg(QDATE);
        QString name = QString("%1/%2_%3.mp4").arg(path).arg(flag).arg(STRDATETIME);

        //create the directory if it does not exist
        QDir dir(path);
        if (!dir.exists()) {
            dir.mkpath(path);
        }

        //stop first, then reopen to start a new recording segment
        ffmpegSave->stop();
        ffmpegSave->open(name);
        recordTime = QDateTime::currentDateTime();
    }
}

void NetPushClient::receivePlayStart(int time)
{
    //demo: push with OSD overlays added
#ifdef betaversion
    int height = ffmpegThread->getVideoHeight();
    QList<OsdInfo> osds = WidgetHelper::getTestOsd(height);
    ffmpegThread->setOsdInfo(osds);
#endif

    //recording (pushing) can only start once the source has been opened
    ffmpegThread->recordStart(pushUrl);

    //besides pushing, also keep a separate local recording
    if (!ffmpegSave && recordType > 0) {
        //if the upstream save class did not open successfully there is no point continuing
        FFmpegSave *saveFile = ffmpegThread->getSaveFile();
        if (!saveFile->getIsOk()) {
            return;
        }

        ffmpegSave = new FFmpegSave(this);
        //if the stream was re-encoded, take the streams from the save class instead of the source
        AVStream *videoStreamIn = saveFile->getVideoEncode() ? saveFile->getVideoStream() : ffmpegThread->getVideoStream();
        AVStream *audioStreamIn = saveFile->getAudioEncode() ? saveFile->getAudioStream() : ffmpegThread->getAudioStream();
        ffmpegSave->setSavePara(ffmpegThread->getMediaType(), SaveVideoType_Mp4, videoStreamIn, audioStreamIn);
        this->record();
        timerRecord->start();
    }
}

void NetPushClient::receivePacket(AVPacket *packet)
{
    if (ffmpegSave && ffmpegSave->getIsOk()) {
        ffmpegSave->writePacket2(packet);
    }
    FFmpegHelper::freePacket(packet);
}

void NetPushClient::recorderStateChanged(const RecorderState &state, const QString &file)
{
    int width = 0;
    int height = 0;
    int videoStatus = 0;
    int audioStatus = 0;
    if (ffmpegThread) {
        width = ffmpegThread->getVideoWidth();
        height = ffmpegThread->getVideoHeight();
        FFmpegSave *saveFile = ffmpegThread->getSaveFile();
        if (saveFile->getIsOk()) {
            //status values: 0 = absent / 1 = input only / 2 = passthrough / 3 = re-encoded
            if (saveFile->getVideoIndexIn() >= 0) {
                if (saveFile->getVideoIndexOut() >= 0) {
                    videoStatus = (saveFile->getVideoEncode() ? 3 : 2);
                } else {
                    videoStatus = 1;
                }
            }
            if (saveFile->getAudioIndexIn() >= 0) {
                if (saveFile->getAudioIndexOut() >= 0) {
                    audioStatus = (saveFile->getAudioEncode() ? 3 : 2);
                } else {
                    audioStatus = 1;
                }
            }
        }
    }

    //only the recording state means the push has actually started
    bool start = (state == RecorderState_Recording);
    emit pushStart(mediaUrl, width, height, videoStatus, audioStatus, start);
}

void NetPushClient::receiveSaveStart()
{
    emit pushChanged(mediaUrl, 0);
}

void NetPushClient::receiveSaveFinsh()
{
    emit pushChanged(mediaUrl, 1);
}

void NetPushClient::receiveSaveError(int error)
{
    emit pushChanged(mediaUrl, 2);
}

void NetPushClient::setMediaUrl(const QString &mediaUrl)
{
    this->mediaUrl = mediaUrl;
}

void NetPushClient::setPushUrl(const QString &pushUrl)
{
    this->pushUrl = pushUrl;
}

void NetPushClient::start()
{
    if (ffmpegThread || mediaUrl.isEmpty() || pushUrl.isEmpty()) {
        return;
    }

    //create the video capture thread
    ffmpegThread = new FFmpegThread;
    //the play-start signal is used to kick off the push
    connect(ffmpegThread, SIGNAL(receivePlayStart(int)), this, SLOT(receivePlayStart(int)));
    //the recorder-state signal is used to judge whether the push succeeded
    connect(ffmpegThread, SIGNAL(recorderStateChanged(RecorderState, QString)), this, SLOT(recorderStateChanged(RecorderState, QString)));

    //set the source address
    ffmpegThread->setMediaUrl(mediaUrl);
    //set the decoding backend
    ffmpegThread->setVideoCore(VideoCore_FFmpeg);
    //set the video rendering mode
#ifdef openglx
    ffmpegThread->setVideoMode(VideoMode_Opengl);
#else
    ffmpegThread->setVideoMode(VideoMode_Painter);
#endif

    //set the transport protocol (tcp is recommended for rtsp streams)
    //ffmpegThread->setTransport("tcp");
    //set hardware decoding (unrelated to pushing/only speeds up display/pushing only depends on hardware encoding)
    //ffmpegThread->setHardware("dxva2");
    //set the cache size (increase it for high resolution/frame rate/bitrate sources)
    ffmpegThread->setCaching(8192000);
    //set the decode strategy (when pulling the pushed address, the fastest mode is recommended)
    //ffmpegThread->setDecodeType(DecodeType_Fastest);
    //set the read timeout/after it expires the source reconnects automatically
    ffmpegThread->setReadTimeout(5 * 1000);
    //set the connect timeout (0 means keep trying)
    ffmpegThread->setConnectTimeout(0);
    //enable repeat playback, which amounts to looping the push
    ffmpegThread->setPlayRepeat(true);
    //do not play audio by default (enabled for whichever channel is selected in the UI)
    ffmpegThread->setPlayAudio(false);
    //do not preview video by default (enabled for whichever channel is selected in the UI)
    ffmpegThread->setPushPreview(false);

    //make the save class emit packet signals so the data can also be written to a file
    FFmpegSave *saveFile = ffmpegThread->getSaveFile();
    saveFile->setProperty("checkB", true);
    saveFile->setSendPacket(recordType > 0, false);
    connect(saveFile, SIGNAL(receivePacket(AVPacket *)), this, SLOT(receivePacket(AVPacket *)));
    connect(saveFile, SIGNAL(receiveSaveStart()), this, SLOT(receiveSaveStart()));
    connect(saveFile, SIGNAL(receiveSaveFinsh()), this, SLOT(receiveSaveFinsh()));
    connect(saveFile, SIGNAL(receiveSaveError(int)), this, SLOT(receiveSaveError(int)));

    //local devices and desktop capture need extra parameters extracted from the url
    VideoHelper::initVideoPara(ffmpegThread, mediaUrl, encodeVideoScale);

    //set the video encode format/compression ratio/scale ratio
    ffmpegThread->setEncodeVideo((EncodeVideo)encodeVideo);
    ffmpegThread->setEncodeVideoRatio(encodeVideoRatio);
    ffmpegThread->setEncodeVideoScale(encodeVideoScale);

    //start playback
    ffmpegThread->play();
}

void NetPushClient::stop()
{
    //stop pushing and capturing, then release the object completely
    if (ffmpegThread) {
        ffmpegThread->recordStop();
        ffmpegThread->stop();
        ffmpegThread->deleteLater();
        ffmpegThread = NULL;
    }

    //stop the local recording
    if (ffmpegSave) {
        timerRecord->stop();
        ffmpegSave->stop();
        ffmpegSave->deleteLater();
        ffmpegSave = NULL;
    }
}
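For reference, here is a minimal usage sketch of NetPushClient based only on the methods and signals visible above; the default constructor, the receiver object and its onPushStart slot are assumptions, and members such as recordType, encodeVideo and the encode ratios are expected to be configured elsewhere in the project.

//assumed: NetPushClient has a default (or parent-taking) constructor
NetPushClient *client = new NetPushClient;
//source address: file, network stream, local device or desktop capture
client->setMediaUrl("rtsp://192.168.1.100:554/stream1");
//push address on the streaming media server
client->setPushUrl("rtmp://127.0.0.1/live/stream1");
//pushStart reports resolution and audio/video status for the channel table
//(receiver and onPushStart are hypothetical names for the listening object and slot)
QObject::connect(client, SIGNAL(pushStart(QString, int, int, int, int, bool)),
                 receiver, SLOT(onPushStart(QString, int, int, int, int, bool)));
//start capture; the push itself begins in receivePlayStart once the source is open
client->start();
//stop and release everything when the channel is closed
client->stop();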