[音視頻 ffmpeg] 復用推流

2024年2月6日 19点热度 0人点赞

簡介: [音視頻 ffmpeg] 復用推流

獲取攝像頭demo

videodecodethread.cpp

#include "videodecodethread.h"
VideodecodeThread::VideodecodeThread(QObject *parent)
    : QThread(parent)
{
    // One-time FFmpeg global setup (idempotent, order-independent):
    // networking support and the capture-device demuxers (dshow etc.).
    avformat_network_init();
    avdevice_register_all();
}
VideodecodeThread::~VideodecodeThread()
{
    // Release everything run() allocated. The FFmpeg *_free(&p) helpers
    // also null the pointer they are handed.
    if(pFormatCtx)
    {
        avformat_close_input(&pFormatCtx);
    }
    if(packet)
    {
        av_packet_free(&packet);
    }
    if(pAvCodecCtx)
    {
        // avcodec_close() alone leaked the context struct itself;
        // avcodec_free_context() closes and frees it.
        avcodec_free_context(&pAvCodecCtx);
    }
    if(pAvFrame)
    {
        // av_free() leaked the frame's internal data buffers;
        // av_frame_free() releases frame + buffers.
        av_frame_free(&pAvFrame);
    }
    if(pAVFrameRGB)
    {
        av_frame_free(&pAVFrameRGB);   // previously never freed
    }
    if(pSwsCtx)
    {
        sws_freeContext(pSwsCtx);      // previously never freed
        pSwsCtx = nullptr;
    }
    if(buffer)
    {
        av_free(buffer);               // previously never freed
        buffer = nullptr;
    }
    if(options)
    {
        av_dict_free(&options);        // previously never freed
    }
}
void VideodecodeThread::run()
{
    fmt = av_find_input_format("dshow");
    av_dict_set(&options, "video_size",  "640*480", 0);
    av_dict_set(&options, "framerate",  "30", 0);
    ret = avformat_open_input(&pFormatCtx, "video=ov9734_azurewave_camera", fmt, &options);
    if (ret < 0)
    {
        qDebug() << "Couldn't open input stream." << ret;
        return;
    }
    ret = avformat_find_stream_info(pFormatCtx, &options);
    if(ret < 0)
    {
        qDebug()<< "Couldn't find stream information.";
        return;
    }
    videoIndex = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pAvCodec, 0);
    if(videoIndex < 0)
    {
        qDebug()<< "Couldn't av_find_best_stream.";
        return;
    }
    pAvCodec = avcodec_find_decoder(pFormatCtx->streams[videoIndex]->codecpar->codec_id);
    if(!pAvCodec)
    {
        qDebug()<< "Couldn't avcodec_find_decoder.";
        return;
    }
    qDebug()<<"pAVCodec->name:" << QString::fromStdString(pAvCodec->name);
    if(pFormatCtx->streams[videoIndex]->avg_frame_rate.den != 0)
    {
            float fps_ = pFormatCtx->streams[videoIndex]->avg_frame_rate.num / pFormatCtx->streams[videoIndex]->avg_frame_rate.den;
            qDebug() <<"fps:" << fps_;
    }
    int64_t video_length_sec_ = pFormatCtx->duration/AV_TIME_BASE;
    qDebug() <<"video_length_sec_:" << video_length_sec_;
    pAvCodecCtx = avcodec_alloc_context3(pAvCodec);
    if(!pAvCodecCtx)
    {
        qDebug()<< "Couldn't avcodec_alloc_context3.";
        return;
    }
    ret = avcodec_parameters_to_context(pAvCodecCtx, pFormatCtx->streams[videoIndex]->codecpar);
    if(ret < 0)
    {
        qDebug()<< "Couldn't avcodec_parameters_to_context.";
        return;
    }
    ret = avcodec_open2(pAvCodecCtx, pAvCodec, nullptr);
    if(ret!=0)
    {
        qDebug("avcodec_open2 %d", ret);
        return;
    }
    pAvFrame = av_frame_alloc();
    pAVFrameRGB = av_frame_alloc();
    pSwsCtx = sws_getContext(pAvCodecCtx->width, pAvCodecCtx->height, pAvCodecCtx->pix_fmt,
                                          pAvCodecCtx->width, pAvCodecCtx->height, AV_PIX_FMT_RGB32,
                                          SWS_BICUBIC, NULL, NULL, NULL);
    m_size = av_image_get_buffer_size(AVPixelFormat(AV_PIX_FMT_RGB32), pAvCodecCtx->width, pAvCodecCtx->height, 1);
    buffer = (uint8_t*)av_malloc(m_size);
    //為已經分配的空間的結構體AVPicture掛上一段用於保存數據的空間
    av_image_fill_arrays(pAVFrameRGB->data, pAVFrameRGB->linesize, buffer, AV_PIX_FMT_RGB32, pAvCodecCtx->width, pAvCodecCtx->height, 1);
    packet = av_packet_alloc();
    av_new_packet(packet, pAvCodecCtx->width * pAvCodecCtx->height);
    while(runFlag && !av_read_frame(pFormatCtx, packet))
    {
        if (packet->stream_index == videoIndex)
        {
            //解碼一幀視頻數據
            int iGotPic = avcodec_send_packet(pAvCodecCtx, packet);
            if(iGotPic != 0)
            {
                qDebug("VideoIndex avcodec_send_packet error :%d", iGotPic);
                continue;
            }
            iGotPic = avcodec_receive_frame(pAvCodecCtx, pAvFrame);
            if(iGotPic == 0){
                //轉換像素
                sws_scale(pSwsCtx, (uint8_t const * const *)pAvFrame->data, pAvFrame->linesize, 0,
                        pAvFrame->height, pAVFrameRGB->data, pAVFrameRGB->linesize);
                QImage desImage = QImage((uchar*)buffer, pAvCodecCtx->width,pAvCodecCtx->height,
                                    QImage::Format_RGB32); //RGB32
                emit sigSendQImage(desImage);//得到圖片的時候觸發信號
                byte = QByteArray((char*)pAvFrame->data);
                videoQueue.push(byte);
                videoCount  ;
                msleep(25);
            }
        }
        av_packet_unref(packet);
    }
}
// Request the capture loop in run() to stop (the flag is checked once per
// av_read_frame iteration, so the thread exits after the current frame).
// NOTE(review): runFlag is written here from the caller's thread and read in
// run() on the worker thread with no synchronization — it should be declared
// std::atomic<bool> in the header; confirm and fix there.
void VideodecodeThread::setRunFlag(bool flag)
{
    runFlag = flag;
}

videodecodethread.h

粉絲福利, 免費領取C++音視頻學習資料包 學習路線大綱、技術視頻/代碼,內容包括(音視頻開發,面試題,FFmpeg ,webRTC ,rtmp ,hls ,rtsp ,ffplay ,編解碼,推拉流,srs),有需要的可以進企鵝裙927239107領取哦~

#ifndef VIDEODECODETHREAD_H
#define VIDEODECODETHREAD_H
#include <QObject>
#include <QThread>
#include <QDebug>
#include <mutex>
#include <QImage>
#include "SharedVariables.h"
extern "C" {
    #include "libavdevice/avdevice.h"    // header required for capture/input devices (dshow)
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libavutil/avutil.h"
    #include "libswscale/swscale.h"
    #include "libavutil/imgutils.h"
    #include "libavutil/pixfmt.h"
    #include "libavutil/error.h"
    #include "libswresample/swresample.h"
    #include "libavfilter/avfilter.h"
}
// Worker thread that captures video from a DirectShow camera with FFmpeg,
// decodes it, converts frames to RGB32 and emits them as QImage.
// Raw frame bytes are also pushed to videoQueue (declared in SharedVariables.h
// — presumably consumed by a muxing/streaming thread; not visible here).
class VideodecodeThread :public QThread
{
    Q_OBJECT
public:
    VideodecodeThread(QObject *parent = nullptr);
    ~VideodecodeThread();
    // QThread entry point: open device, decode loop, emit frames.
    void run() override;
    // Ask the decode loop to stop; checked once per packet in run().
    void setRunFlag(bool flag);
signals:
    // Emitted for every decoded frame, already converted to RGB32.
    void sigSendQImage(QImage);
private:
    const AVInputFormat *fmt = nullptr;        // "dshow" input format
    AVFormatContext *pFormatCtx = nullptr;     // demuxer context for the camera
    AVDictionary *options = nullptr;           // open options (video_size, framerate)
    AVPacket *packet = nullptr;                // compressed packet from av_read_frame
    AVFrame* pAvFrame = nullptr;               // decoded frame (native pixel format)
    const AVCodec *pAvCodec = nullptr;         // decoder chosen by av_find_best_stream
    AVCodecContext *pAvCodecCtx = nullptr;     // decoder context
    AVFrame* pAVFrameRGB = nullptr;            // frame wrapper around `buffer` (RGB32)
    SwsContext* pSwsCtx = nullptr;             // pixel-format converter -> RGB32
    QByteArray byte;                           // scratch copy of raw frame bytes for videoQueue
    int  m_size = 0;                           // size of `buffer` in bytes
    uint8_t* buffer = nullptr;                 // RGB32 pixel buffer backing pAVFrameRGB
    int ret = -1;                              // last FFmpeg return code
    int videoIndex = -1;                       // index of the video stream
    bool runFlag = true;                       // loop control; written cross-thread — should be atomic
    int videoCount = 0;                        // number of frames processed
};
#endif // VIDEODECODETHREAD_H

攝像頭麥克風打開正常