(4) Playing video with FFmpeg in Qt, with pause and resume
1. Adding the FFmpeg libraries and headers to the Qt project
Add the following to the project's .pro file:
INCLUDEPATH += /usr/local/ffmpeg/include
LIBS += -L/usr/local/lib -lavutil -lavcodec -lavformat -lswscale
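If the paths match your FFmpeg installation, a minimal check like the one below (a hypothetical check_ffmpeg.cpp, not part of the final project) should build and link against these libraries before you move on:
extern "C" {
#include "libavutil/avutil.h"
}
#include <QDebug>
int main()
{
// Print the version string of the FFmpeg build that was actually linked
qDebug() << "FFmpeg version:" << av_version_info();
return 0;
}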
2. Full code
FFmpegVideoDecode video decoding class (runs in a worker thread)
ffmpegvideodecode.h
#ifndef FFMPEGVIDEODECODE_H
#define FFMPEGVIDEODECODE_H
#include <QObject>
#include <QImage>
#include <QString>
#include <stdint.h>
struct AVCodec;
struct AVCodecContext;
struct AVFrame;
struct AVFormatContext;
struct SwsContext;
struct AVPacket;
class FFmpegVideoDecode : public QObject
{
Q_OBJECT
public:
explicit FFmpegVideoDecode(QObject *parent = nullptr);
~FFmpegVideoDecode();
bool initFFmpeg(QString fileName);
void clear();
private:
void inputError(int ret, QString funName);
signals:
void sigToStart();
void sigToUpdateImage(const QImage &image);
public slots:
void onStartPlay(QString name);
void onStopPlay();
void onFinish();
void onUpdateRead();
private:
AVFormatContext* pFormatCtx = nullptr;
AVCodecContext* pCodecCtx = nullptr;
AVFrame* pAvFrame = nullptr;
AVFrame* pFrameRGB32 = nullptr;
AVPacket* packet = nullptr;
uint8_t *out_buffer = nullptr;
SwsContext *img_convert_ctx = nullptr;
int videoIndex;
bool is_stop = false;
bool is_finish = true;
};
#endif // FFMPEGVIDEODECODE_H
ffmpegvideodecode.cpp
#include "ffmpegvideodecode.h"
extern "C"
{
#include "libavutil/avutil.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}
#include <QDebug>
#include <QThread>
#include <QEventLoop>
#include <QTimer>
/**
* @brief Non-blocking delay
* @param msec delay in milliseconds
*/
void sleepMsec(int msec)
{
if (msec <= 0) return;
// Run a local event loop so queued signals (pause/stop) are still processed during the wait
QEventLoop loop;
QTimer::singleShot(msec, &loop, &QEventLoop::quit);
loop.exec();
}
FFmpegVideoDecode::FFmpegVideoDecode(QObject *parent) : QObject(parent) {}
FFmpegVideoDecode::~FFmpegVideoDecode() {}
bool FFmpegVideoDecode::initFFmpeg(QString fileName)
{
// avformat_open_input opens the input file and reads the container header
int ret = avformat_open_input(&pFormatCtx, fileName.toUtf8().constData(), nullptr, nullptr);
if (ret < 0) { inputError(ret, "avformat_open_input"); clear(); return false; }
// avformat_find_stream_info probes the streams and fills in their codec parameters
ret = avformat_find_stream_info(pFormatCtx, nullptr);
if (ret < 0) { inputError(ret, "avformat_find_stream_info"); clear(); return false; }
qint64 duration = pFormatCtx->duration / (AV_TIME_BASE / 1000); // total video duration in milliseconds
qDebug() << "video duration(ms):" << duration;
// av_find_best_stream returns the index of the best video stream
videoIndex = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
/* Equivalent manual search over the streams:
for (uint32_t i = 0; i < pFormatCtx->nb_streams; ++i)
{
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoIndex = i;
break;
}
}
*/
if (videoIndex < 0) { inputError(AVERROR_STREAM_NOT_FOUND, "av_find_best_stream"); clear(); return false; }
AVCodecParameters* parmeter = pFormatCtx->streams[videoIndex]->codecpar; // codec parameters of the video stream
/*
* avcodec_find_decoder looks up the video decoder by codec ID (newer FFmpeg versions require the return value to be const)
*/
const AVCodec* _pCodec = avcodec_find_decoder(parmeter->codec_id);
if (_pCodec == NULL)
{
inputError(AVERROR_DECODER_NOT_FOUND,"avcodec_find_decoder");
clear();
return false;
}
/*
* avcodec_alloc_context3 allocates an AVCodecContext structure for the given decoder
* the AVCodecContext holds the parameters used during decoding/encoding
*/
pCodecCtx = avcodec_alloc_context3(_pCodec); // allocate a codec context
/*
* avcodec_parameters_to_context copies the stream parameters (the fields of AVStream::codecpar) into the AVCodecContext
*/
ret = avcodec_parameters_to_context(pCodecCtx, parmeter);
if (ret < 0) { inputError(ret, "avcodec_parameters_to_context"); clear(); return false; }
// avcodec_open2 opens the decoder with the configured context
ret = avcodec_open2(pCodecCtx, _pCodec, nullptr);
if (ret < 0) { inputError(ret, "avcodec_open2"); clear(); return false; }
pAvFrame = av_frame_alloc(); // holds the decoded (native pixel format) frame
pFrameRGB32 = av_frame_alloc(); // holds the frame converted to RGB32
/*
* av_image_get_buffer_size computes the buffer size needed for one RGB32 frame
* av_malloc allocates that buffer, and av_image_fill_arrays points pFrameRGB32->data/linesize
* into it -> carves the allocated memory up into image planes
*/
int size = av_image_get_buffer_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height,4);
out_buffer = (uint8_t *)av_malloc(size);
ret = av_image_fill_arrays(pFrameRGB32->data,pFrameRGB32->linesize, out_buffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height,1);
if (ret < 0) { inputError(ret, "av_image_fill_arrays"); clear(); return false; }
// sws_getContext creates the scaler/converter used to turn decoded frames into RGB32
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB32, SWS_BICUBIC, nullptr, nullptr, nullptr);
packet = av_packet_alloc();
av_new_packet(packet, pCodecCtx->width * pCodecCtx->height); // allocate the packet payload and initialise its fields
return true;
}
void FFmpegVideoDecode::onStartPlay(QString name)
{
if (!initFFmpeg(name))
return;
is_finish = false;
is_stop = false;
// Keep reading packets until the file ends or onFinish() asks the loop to exit
while (av_read_frame(pFormatCtx, packet) >= 0)
{
if (is_finish)
break; // stop playback completely
while (is_stop && !is_finish)
sleepMsec(100); // paused: wait here until resumed
if (packet->stream_index == videoIndex)
{
// Feed the compressed packet to the decoder and fetch the decoded frame
avcodec_send_packet(pCodecCtx, packet);
if (avcodec_receive_frame(pCodecCtx, pAvFrame) == 0)
{
// Convert the decoded frame to RGB32 so it can be wrapped in a QImage
sws_scale(img_convert_ctx, pAvFrame->data, pAvFrame->linesize, 0, pCodecCtx->height, pFrameRGB32->data, pFrameRGB32->linesize);
//qDebug() << "decoded one video frame";
QImage image(out_buffer, pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB32);
Q_EMIT sigToUpdateImage(image);
// sleepMsec -> non-blocking (clicking Play can immediately restart from the beginning); QThread::msleep(40) -> blocking
// Method 1: the worker thread blocks in this delay while reading frames, so a queued slot cannot update the pause flag;
// change the connect type to Qt::DirectConnection so the pause state is modified from the main thread
QThread::msleep(40);
// Method 2: make the worker-thread delay non-blocking instead, then the pause state can be updated through a normal queued connection
// sleepMsec(40);
}
/*
* av_packet_free and av_packet_unref are both FFmpeg functions for releasing AVPacket resources.
* av_packet_free unreferences the packet and also frees the AVPacket structure itself.
* av_packet_unref only releases the referenced data buffer and resets the fields, but keeps the AVPacket
* structure allocated so it can be reused on the next av_read_frame call.
*/
av_packet_unref(packet);
}
else
{
// Packet from a non-video stream (e.g. audio): just log and release it
qDebug() << "skip packet from stream" << packet->stream_index;
av_packet_unref(packet);
}
}
qDebug() << "video play finished";
clear(); // release all FFmpeg resources when playback ends or is aborted
}
void FFmpegVideoDecode::onStopPlay()
{
is_stop = !is_stop; // toggle between paused and playing
}
void FFmpegVideoDecode::onFinish()
{
is_finish = true; // ask the decode loop to exit
}
void FFmpegVideoDecode::inputError(int ret, QString funName)
{
char buf[1024] = {0};
av_strerror(ret, buf, sizeof(buf)); // translate the FFmpeg error code into readable text
qDebug() << funName << "failed:" << buf;
}
void FFmpegVideoDecode::clear()
{
if (img_convert_ctx) { sws_freeContext(img_convert_ctx); img_convert_ctx = nullptr; }
if (packet) av_packet_free(&packet);
if (pFrameRGB32) av_frame_free(&pFrameRGB32);
if (pAvFrame) av_frame_free(&pAvFrame);
if (out_buffer) { av_free(out_buffer); out_buffer = nullptr; }
if (pCodecCtx) avcodec_free_context(&pCodecCtx);
if (pFormatCtx) avformat_close_input(&pFormatCtx);
}
VideoPlayer video display class (widget that renders the decoded frames)
videoplayer.h
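The original post does not reproduce videoplayer.h itself; the following is a minimal declaration sketch inferred from the implementation below (the signal, slot and member names come from the .cpp, everything else is an assumption):
#ifndef VIDEOPLAYER_H
#define VIDEOPLAYER_H
#include <QWidget>
#include <QPixmap>
#include <QMutex>
#include <QThread>
#include "ffmpegvideodecode.h"
namespace Ui { class VideoPlayer; }
class VideoPlayer : public QWidget
{
Q_OBJECT
public:
explicit VideoPlayer(QWidget *parent = nullptr);
~VideoPlayer();
signals:
void sigToStart(QString name); // forwarded to FFmpegVideoDecode::onStartPlay
void sigToStop(); // toggle pause/resume
void sigToFinish(); // stop playback completely
public slots:
void onUpdateImage(const QImage &image);
protected:
void paintEvent(QPaintEvent *event) override;
private:
Ui::VideoPlayer *ui = nullptr;
FFmpegVideoDecode *ffmpeg_decode = nullptr;
QThread *thread_decode = nullptr;
QPixmap pixmap_video; // last decoded frame, drawn in paintEvent
QMutex m_mutex; // protects pixmap_video between the decode signal and paintEvent
};
#endif // VIDEOPLAYER_H
videoplayer.cpp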
#include "videoplayer.h"
#include "ui_videoplayer.h"
#include <QPainter>
VideoPlayer::VideoPlayer(QWidget *parent) :
QWidget(parent),
ui(new Ui::VideoPlayer)
{
ui->setupUi(this);
ffmpeg_decode = new FFmpegVideoDecode;
thread_decode = new QThread;
ffmpeg_decode->moveToThread(thread_decode);
connect(thread_decode,&QThread::finished,ffmpeg_decode,&FFmpegVideoDecode::deleteLater);
connect(thread_decode,&QThread::finished,thread_decode,&QThread::deleteLater);
connect(ffmpeg_decode,&FFmpegVideoDecode::sigToUpdateImage,this,&VideoPlayer::onUpdateImage);
connect(this,&VideoPlayer::sigToStart,ffmpeg_decode,&FFmpegVideoDecode::onStartPlay);
connect(this,&VideoPlayer::sigToFinish,ffmpeg_decode,&FFmpegVideoDecode::onFinish,Qt::DirectConnection);
// Method 1: the worker thread reads frames with a blocking delay, so a queued slot cannot update the pause flag; use Qt::DirectConnection so the flag is changed from the main (emitting) thread
connect(this,&VideoPlayer::sigToStop,ffmpeg_decode,&FFmpegVideoDecode::onStopPlay,Qt::DirectConnection);
// Method 2: change the worker-thread delay to the non-blocking sleepMsec, then the pause flag can be updated through a normal queued connection
// connect(this,&VideoPlayer::sigToStop,ffmpeg_decode,&FFmpegVideoDecode::onStopPlay);
thread_decode->start();
}
VideoPlayer::~VideoPlayer()
{
delete ui;
Q_EMIT sigToFinish();
thread_decode->quit();
thread_decode->wait();
}
void VideoPlayer::onUpdateImage(const QImage &image)
{
m_mutex.lock();
pixmap_video = QPixmap::fromImage(image);
m_mutex.unlock();
update();
}
void VideoPlayer::paintEvent(QPaintEvent *event)
{
if(!pixmap_video.isNull())
{
QPainter painter(this);
m_mutex.lock();
QPixmap pixmap = pixmap_video.scaled(this->size(), Qt::KeepAspectRatio);
m_mutex.unlock();
int x = (this->width() - pixmap.width()) / 2;
int y = (this->height() - pixmap.height()) / 2;
painter.drawPixmap(x, y, pixmap);
}
QWidget::paintEvent(event);
}
Widget class (main window with the controls)
widget.h
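widget.h is likewise not shown in the post; a minimal declaration consistent with widget.cpp could look like this (the slot names follow Qt's auto-connection convention for the buttons in the .ui file, which is assumed):
#ifndef WIDGET_H
#define WIDGET_H
#include <QWidget>
#include "videoplayer.h"
namespace Ui { class Widget; }
class Widget : public QWidget
{
Q_OBJECT
public:
explicit Widget(QWidget *parent = nullptr);
~Widget();
private slots:
void on_pushButton_start_clicked(); // start / restart playback
void on_pushButton_stop_clicked(); // pause or resume
void on_pushButton_select_clicked(); // choose the video file
private:
Ui::Widget *ui = nullptr;
VideoPlayer *player = nullptr;
};
#endif // WIDGET_H
widget.cpp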
#include "widget.h"
#include "ui_widget.h"
#include <QFileDialog>
#include <QDebug>
Widget::Widget(QWidget *parent) :
QWidget(parent),
ui(new Ui::Widget)
{
ui->setupUi(this);
player = new VideoPlayer(ui->widget_play);
ui->horizontalLayout->addWidget(player);
}
Widget::~Widget()
{
delete ui;
}
void Widget::on_pushButton_start_clicked()
{
Q_EMIT player->sigToFinish();
Q_EMIT player->sigToStart(ui->lineEdit_path->text());
ui->pushButton_stop->setText("Pause");
}
void Widget::on_pushButton_stop_clicked()
{
Q_EMIT player->sigToStop();
if(ui->pushButton_stop->text() == "Pause")
ui->pushButton_stop->setText("Resume");
else if(ui->pushButton_stop->text() == "Resume")
ui->pushButton_stop->setText("Pause");
}
void Widget::on_pushButton_select_clicked()
{
QString fileName = QFileDialog::getOpenFileName(this, tr("Open File"),
"/home",
tr("Video (*.mp4 *.flv)"));
if(fileName.isEmpty()) return;
ui->lineEdit_path->setText(fileName);
qDebug() << "selected file:" << fileName;
}
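The post does not include main.cpp; the standard qmake-generated entry point is all that is needed to run the example:
#include "widget.h"
#include <QApplication>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
Widget w;
w.show();
return a.exec();
}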
