沐诗羽 2023-08-23 11:01
Closed

Qt + FFmpeg: composing QImage frames into a video produces only one frame

I am using FFmpeg 6.0 with Qt to compose QImage frames into a video, but no matter how I loop and write frames, the output file always contains just a single frame.

qDebug()<<"================================================================start======================================================================";
    int ret;
    QImage image("/sdcard/photo/20210101130446468.jpg");
    int imagewidth = image.width();
    int imageheight = image.height();
    // Initialize FFmpeg
    qDebug()<<"avdevice_register_all()";
    avdevice_register_all(); // register all devices
    qDebug()<<"formatContext = avformat_alloc_context()";
    formatContext = avformat_alloc_context(); // allocate the format context
    
    qint64 timeT = QDateTime::currentMSecsSinceEpoch(); // millisecond timestamp
    QString outputFileName = QString("/sdcard/").append("ffmpeg").append(QString::number(timeT)).append(".mp4");
    // The third parameter may also be nullptr; the container is then guessed from the outputFileName suffix
    qDebug()<<"avformat_alloc_output_context2(&formatContext, nullptr, \"mp4\", outputFileName.toUtf8().constData())";
    ret = avformat_alloc_output_context2(&formatContext, nullptr, nullptr, outputFileName.toUtf8().constData());
    qDebug()<<"ret===="<<ret;
    qDebug()<<"formatContext===="<<formatContext;
    qDebug()<<"formatContext->oformat = av_guess_format(nullptr, outputFileName.toUtf8().constData(), nullptr);";
    formatContext->oformat = av_guess_format(nullptr, outputFileName.toUtf8().constData(), nullptr);
    qDebug() << "avio_open(&formatContext->pb, outputFileName.toUtf8().constData(), AVIO_FLAG_WRITE) < 0";
    // Open the output file
    if (avio_open(&formatContext->pb, outputFileName.toUtf8().constData(), AVIO_FLAG_WRITE) < 0) {
        qDebug() << "Failed to open output file";
        return;
    }
    qDebug() << "AVStream* stream = avformat_new_stream(formatContext, nullptr);";
    // Create an AVStream
    AVStream* stream = avformat_new_stream(formatContext, nullptr);
    if (!stream) {
        qDebug() << "Failed to create output stream";
        return;
    }
    
    qDebug() << "AVCodecParameters* codecParameters = stream->codecpar;";
    // Configure the stream's codec parameters
    AVCodecParameters* codecParameters = stream->codecpar;
    codecParameters->codec_type = AVMEDIA_TYPE_VIDEO;
    codecParameters->codec_id = AV_CODEC_ID_H264; // use the H.264 encoder
    codecParameters->width = imagewidth;
    codecParameters->height = imageheight;
    qDebug() << " const AVCodec* codec = avcodec_find_encoder(codecParameters->codec_id);";
    
    qDebug() << "const AVCodec* codec = avcodec_find_encoder(codecParameters->codec_id);";
     // 打开编解码器
    const AVCodec* codec = avcodec_find_encoder(codecParameters->codec_id);
    AVCodecContext* codecContext = avcodec_alloc_context3(codec);
    codecContext->width = imagewidth;
    codecContext->height = imageheight;
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
    codecContext->time_base = {1, 30}; // encoder time base: 1/30 of a second per tick
    codecContext->framerate = {30, 1}; // encoder frame rate: 30 fps
    
    
    qDebug() << "AV_PIX_FMT_YUV420P====="<<AV_PIX_FMT_YUV420P;
    qDebug() << "codecContext->pix_fmt====="<<codecContext->pix_fmt;
    qDebug() << "avcodec_open2(codecContext, codec, nullptr);";
    // Open the encoder immediately after configuring it; otherwise the settings get reset when avcodec_parameters_to_context is called
    ret = avcodec_open2(codecContext, codec, nullptr);
    if(ret < 0){
         qDebug() << "Failed to avcodec_open2";
         return;
    }
    qDebug() << "avcodec_parameters_to_context(codecContext, codecParameters);";
    // Copy the encoder parameters to the output stream
    avcodec_parameters_to_context(codecContext, codecParameters);
    // List the pixel formats the encoder supports
    const AVPixelFormat* pixFmt = codec->pix_fmts;
    qDebug() << "while";
    while (*pixFmt != AV_PIX_FMT_NONE) {
        qDebug() << av_get_pix_fmt_name(*pixFmt);
        ++pixFmt;
    }
    
    qDebug() << " avformat_write_header(formatContext, nullptr);";
    // Write the container header
    avformat_write_header(formatContext, nullptr);
    
    
    
    int num = 0;
    while (num < 1200) {
        qDebug() << "  AVFrame* frame = av_frame_alloc();";
        // Write image frames one at a time
        AVFrame* frame = av_frame_alloc();
        if (!frame) {
            qDebug() << "Failed to allocate frame.";
            return;
        }
        qDebug() << "frame->format = AV_PIX_FMT_YUV420P";
        frame->format = AV_PIX_FMT_YUV420P;
        frame->width = imagewidth;
        frame->height = imageheight;
        
        frame->pts = av_rescale_q(stream->nb_frames, stream->time_base, codecContext->time_base); // timestamp taken from the stream's frame count, rescaled into the encoder time base
        
        if (av_frame_get_buffer(frame, 0) < 0) {
            qDebug() << "Failed to allocate frame buffer.";
            av_frame_free(&frame);
            return;
        }
        
        // Convert the image to the encoder's pixel format
        SwsContext* swsContext = sws_getContext(imagewidth, imageheight, AV_PIX_FMT_BGR32,
                                                frame->width, frame->height, AV_PIX_FMT_YUV420P,
                                                SWS_BICUBIC, nullptr, nullptr, nullptr);
        if (!swsContext) {
            qDebug() << "Failed to create SwsContext.";
            av_frame_free(&frame);
            return;
        }
        
        // Destination planes and strides point into the YUV frame's buffers
        uint8_t* destData[4] = {frame->data[0], frame->data[1], frame->data[2], nullptr};
        int destLinesize[4] = {frame->linesize[0], frame->linesize[1], frame->linesize[2], 0};
        
        // Source: the QImage's packed RGB32 pixels and their row stride
        image = image.convertToFormat(QImage::Format_RGB32);
        const uchar* bits = image.constBits();
        int bytesPerLine = image.bytesPerLine();
        
        // sws_scale returns the number of rows in the converted output image
        ret = sws_scale(swsContext, &bits, &bytesPerLine, 0, image.height(), destData, destLinesize);
        
        qDebug() << "sws_scale ret==="<<ret;
        // Free the conversion context created by sws_getContext
        sws_freeContext(swsContext);
        
        qDebug() << "AVPacket packet;";
        // Encode the frame and write it to the output
        AVPacket packet;
        av_init_packet(&packet);
        packet.data = nullptr;
        packet.size = 0;
        
        int code = -1;
        // Send the frame and receive encoded packets
        while (code < 0) {
            ret = avcodec_send_frame(codecContext, frame);
            qDebug() << "avcodec_send_frame ret===="<<ret;
            code = avcodec_receive_packet(codecContext, &packet);
            qDebug() << "while avcodec_receive_packet====" << code;
            if(code == 0){
                // Write the encoded packet to the output file
                ret = av_interleaved_write_frame(formatContext, &packet);
                qDebug() << "av_interleaved_write_frame==================" << ret;
                av_packet_unref(&packet);
            }
        }
        
        qDebug() << "av_frame_free(&frame);";
        av_frame_free(&frame);
        qDebug()<<"num==============================================="<<num;
        ++num;
    }
    
    
    // Write the trailer
    ret = av_write_trailer(formatContext);
    qDebug() << "av_write_trailer(formatContext) ret==="<<ret;
    //av_frame_free(&frame);
    qDebug()<<"=============================================================stop=========================================================================";



This is my video: love.sunchip-ad.com/qt/ffmpeg1609507548763.mp4


7 answers

  • 技术宅program 2023-08-23 11:26

    You need to create a new QImage for each frame inside the loop instead of reusing the same QImage object. You can use QDir to iterate over the folder that stores the pictures and construct a QImage from each file name.
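
    In isolation, that enumeration is just a couple of Qt calls (a minimal sketch; the folder path and extensions are the same ones used in the full listing below):

    // Enumerate the image files in the folder and load each one as its own QImage
    QDir dir("/sdcard/photo");
    const QStringList imageFiles = dir.entryList(QStringList() << "*.jpg" << "*.png", QDir::Files);
    for (const QString& name : imageFiles) {
        QImage image(dir.filePath(name)); // a fresh QImage for every frame
        // ... convert and encode `image` exactly as in the listing below ...
    }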

    Try this:

    qDebug()<<"start";
    int ret;
    int imagewidth = 0;
    int imageheight = 0;
    
    avdevice_register_all();
    
    formatContext = avformat_alloc_context();
    
    qint64 timeT = QDateTime::currentMSecsSinceEpoch();
    QString outputFileName = QString("/sdcard/").append("ffmpeg").append(QString::number(timeT)).append(".mp4");
    
    ret = avformat_alloc_output_context2(&formatContext, nullptr, nullptr, outputFileName.toUtf8().constData());
    
    formatContext->oformat = av_guess_format(nullptr, outputFileName.toUtf8().constData(), nullptr);
    
    if (avio_open(&formatContext->pb, outputFileName.toUtf8().constData(), AVIO_FLAG_WRITE) < 0) {
      return;
    }
    
    AVStream* stream = avformat_new_stream(formatContext, nullptr);
    if (!stream) {
      return; 
    }
    
    AVCodecParameters* codecParameters = stream->codecpar;
    codecParameters->codec_type = AVMEDIA_TYPE_VIDEO;
    codecParameters->codec_id = AV_CODEC_ID_H264;
    
    const AVCodec* codec = avcodec_find_encoder(codecParameters->codec_id);
    AVCodecContext* codecContext = avcodec_alloc_context3(codec);
    
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
    codecContext->time_base = {1, 30};
    codecContext->framerate = {30, 1};
    
    ret = avcodec_open2(codecContext, codec, nullptr);
    if(ret < 0){
      return;
    }
    
    ret = avcodec_parameters_from_context(codecParameters, codecContext);
    if (ret < 0) {
      return;
    }
    
    const AVPixelFormat* pixFmt = codec->pix_fmts;
    while (*pixFmt != AV_PIX_FMT_NONE) {
      ++pixFmt;
    }
    
    avformat_write_header(formatContext, nullptr);  
    
    int num = 0;
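    // Enumerate every image in the folder; each file is loaded as a fresh QImage and encoded as one frame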
    
    QDir dir("/sdcard/photo");
    QStringList imageFiles = dir.entryList(QStringList() << "*.jpg" << "*.png", QDir::Files);
    
    while (num < imageFiles.size()) {
    
      QImage image(dir.filePath(imageFiles.at(num)));
      
      imagewidth = image.width();
      imageheight = image.height();
      
      codecContext->width = imagewidth;
      codecContext->height = imageheight;
    
      AVFrame* frame = av_frame_alloc();
      if (!frame) {
        return;
      }
    
      frame->format = AV_PIX_FMT_YUV420P;
      frame->width = imagewidth;
      frame->height = imageheight;
            
      frame->pts = av_rescale_q(stream->nb_frames, stream->time_base, codecContext->time_base);
            
      if (av_frame_get_buffer(frame, 0) < 0) {
        av_frame_free(&frame);
        return;
      }
    
      SwsContext* swsContext = sws_getContext(imagewidth, imageheight, AV_PIX_FMT_BGR32,
                                              frame->width, frame->height, AV_PIX_FMT_YUV420P,
                                              SWS_BICUBIC, nullptr, nullptr, nullptr);
      if (!swsContext) {
        av_frame_free(&frame);
        return;
      }
      
      uint8_t* destData[4] = {frame->data[0], frame->data[1], frame->data[2], nullptr};
      int destLinesize[4] = {frame->linesize[0], frame->linesize[1], frame->linesize[2], 0};
            
      image = image.convertToFormat(QImage::Format_RGB32);
      const uchar* bits = image.constBits();
      int bytesPerLine = image.bytesPerLine();
            
      ret = sws_scale(swsContext, &bits, &bytesPerLine, 0, image.height(), destData, destLinesize);
            
      sws_freeContext(swsContext);
    
      AVPacket packet;
      av_init_packet(&packet);
      packet.data = nullptr;
      packet.size = 0;
            
      int code = -1;
      while (code < 0) {
        ret = avcodec_send_frame(codecContext, frame);
        code = avcodec_receive_packet(codecContext, &packet);
        if(code == 0){
          ret = av_interleaved_write_frame(formatContext, &packet);
          av_packet_unref(&packet);
        }
      }
    
      av_frame_free(&frame);
    
      ++num;
    }
    
    ret = av_write_trailer(formatContext);
    
    avcodec_free_context(&codecContext);
    // Close the output file before freeing the format context that owns the pb pointer
    avio_closep(&formatContext->pb);
    avformat_free_context(formatContext);
    
    qDebug()<<"stop";
    
    


Question events

  • Question closed (Aug 23)
  • Question edited (Aug 23)
  • Question created (Aug 23)