How it works: FFmpeg captures the video, which can come from a camera, a file, a screen grab, and so on. Qt's OpenGL widget then displays it. The main work is converting the pixel format in a shader: captured video is usually in a YUV format, while the display expects RGB. The code here only implements the conversion for planar YUV422; other formats work the same way.
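For orientation, here is a minimal sketch (an illustrative helper, not part of the classes below) of how a decoded planar 4:2:2 frame is laid out in memory; it is the reason the texture-upload code later has to deal with the stride (linesize):

extern "C" {
#include "libavutil/frame.h"
}

// Returns a pointer to pixel (x, y) of plane p of a decoded AVFrame,
// assuming planar 4:2:2 (AV_PIX_FMT_YUV422P):
//   plane 0: Y, width x height
//   planes 1, 2: U and V, (width / 2) x height (chroma halved horizontally)
// Rows are linesize[p] bytes apart; linesize can exceed the visible width
// because of alignment padding.
static const unsigned char* planePixel(const AVFrame* f, int p, int x, int y)
{
    return f->data[p] + y * f->linesize[p] + x;
}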
Below is the video-capture class header, VideoSource.h:
#pragma once
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libavutil/error.h"
#include "libavfilter/avfilter.h"
#include "libavutil/time.h"
#include "libavutil/timestamp.h"
}

class VideoSource
{
public:
    VideoSource();
    ~VideoSource();
    VideoSource(const VideoSource&) = delete;
    VideoSource& operator=(const VideoSource&) = delete;
    // inputFormat names the capture driver (e.g. "vfwcap"), url selects the
    // device (for vfwcap: the capture driver number "0".."9").
    void initContext(const char* inputFormat, const char* url);
    void openCamera();
    void closeCamera();
    AVFrame* getImageData();
private:
    AVFormatContext* avFormatContext = nullptr;
    AVCodecContext* avCodecContext = nullptr;
    const AVCodec* avCodec = nullptr;
    AVPacket* avPacket = nullptr;
    bool bIsCameraOpened = false;
    AVFrame* avFrame = nullptr;
};
The implementation file, VideoSource.cpp:
#include "VideoSource.h"
#pragma comment(lib, "avdevice.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "avcodec.lib")
VideoSource::VideoSource()
{
avdevice_register_all();
avFormatContex = avformat_alloc_context();
avPacket = av_packet_alloc();
avFrame = av_frame_alloc();
}
VideoSource::~VideoSource()
{
if (avFormatContex)
{
avformat_free_context(avFormatContex);
}
if (avPacket)
{
av_packet_free(&avPacket);
}
if (avFrame)
{
av_frame_free(&avFrame);
}
}
void VideoSource::initContext(const char* inputFormat, const char* url)
{
    // For device capture the input format names the driver (e.g. "vfwcap")
    // and the URL selects the device; for vfwcap the URL is the capture
    // driver number "0".."9".
    const AVInputFormat* avInput = av_find_input_format(inputFormat);
    int ret = avformat_open_input(&avFormatContext, url, avInput, NULL);
    if (ret < 0)
        return;
    ret = avformat_find_stream_info(avFormatContext, NULL);
    if (ret < 0)
        return;
    int videoIndex = -1;
    for (unsigned int i = 0; i < avFormatContext->nb_streams; i++)
    {
        if (avFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoIndex = i;
            break;
        }
    }
    if (videoIndex >= 0)
    {
        AVCodecParameters* avCodecParam = avFormatContext->streams[videoIndex]->codecpar;
        avCodec = avcodec_find_decoder(avCodecParam->codec_id);
        avCodecContext = avcodec_alloc_context3(avCodec);
        // Copy all stream parameters into the decoder context instead of
        // hand-picking pix_fmt, width and height.
        avcodec_parameters_to_context(avCodecContext, avCodecParam);
    }
}
void VideoSource::openCamera()
{
    if (avCodecContext && avCodec)
    {
        if (avcodec_open2(avCodecContext, avCodec, NULL) == 0)
            bIsCameraOpened = true;
    }
}
void VideoSource::closeCamera()
{
    if (avCodecContext && avCodec)
    {
        avcodec_close(avCodecContext);
        bIsCameraOpened = false;
    }
}
AVFrame* VideoSource::getImageData()
{
    if (!bIsCameraOpened)
        return nullptr;
    // Pull packets until the decoder hands back a frame.
    while (av_read_frame(avFormatContext, avPacket) >= 0)
    {
        int ret = avcodec_send_packet(avCodecContext, avPacket);
        av_packet_unref(avPacket); // the decoder keeps its own reference
        if (ret < 0)
            return nullptr;
        ret = avcodec_receive_frame(avCodecContext, avFrame);
        if (ret == 0)
            return avFrame;
        if (ret != AVERROR(EAGAIN)) // EAGAIN: feed more packets first
            return nullptr;
    }
    return nullptr;
}
The video-display class header, VideoWidget.h:
#pragma once
#include <QtOpenGLWidgets/qopenglwidget.h>
#include <qopenglfunctions.h>
#include <qopenglbuffer.h>
#include <qevent.h>
#include "ui_VideoWidget.h"
#include "libavutil/pixfmt.h"

QT_FORWARD_DECLARE_CLASS(QOpenGLShader)
QT_FORWARD_DECLARE_CLASS(QOpenGLShaderProgram)
QT_FORWARD_DECLARE_CLASS(QOpenGLTexture)
class VideoSource;

class VideoWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
    Q_OBJECT
public:
    VideoWidget(QWidget *parent = nullptr);
    ~VideoWidget();
    void initializeGL() override;
    void paintGL() override;
    void resizeGL(int w, int h) override;
public slots:
    void Tick();
private:
    // Unused in this sample; a custom event type that could be used to
    // drive rendering from another thread.
    class RenderEvent : public QEvent
    {
    public:
        enum Type
        {
            Render_Event = QEvent::User + 1,
        };
        RenderEvent(Type type) : QEvent(QEvent::Type(type)) {}
    };
private:
    void initializeShader();
    void initializeProgram();
    void initializeVertexData();
    void initializeTexture();
    void updateTexture();
    void updateTexture(AVPixelFormat format, int width, int height, const int* lineSize, unsigned char* data[]);
    void updateTextureY(int width, int height, int lineSize, const unsigned char* data);
    void updateTextureU(int width, int height, int lineSize, const unsigned char* data);
    void updateTextureV(int width, int height, int lineSize, const unsigned char* data);
private:
    Ui::VideoWidgetClass ui;
    QOpenGLShader* vsShader = nullptr;
    QOpenGLShader* fsShader = nullptr;
    QOpenGLShaderProgram* shaderProgram = nullptr;
    QOpenGLTexture* textureY = nullptr;
    QOpenGLTexture* textureU = nullptr;
    QOpenGLTexture* textureV = nullptr;
    QOpenGLBuffer vertexBuffer;
    QOpenGLBuffer indexBuffer;
    // Full-screen quad, interleaved per vertex as position (x, y, z),
    // color (r, g, b), texture coordinate (u, v) -- 8 floats per vertex.
    // The u coordinate runs right to left, so the image is mirrored
    // horizontally (a selfie-style view).
    float vertices[32] =
    {
        -1.0f,  1.0f, 0.0f,   1.0f, 0.0f, 0.0f,   1.0f, 0.0f,
        -1.0f, -1.0f, 0.0f,   0.0f, 1.0f, 0.0f,   1.0f, 1.0f,
         1.0f, -1.0f, 0.0f,   0.0f, 0.0f, 1.0f,   0.0f, 1.0f,
         1.0f,  1.0f, 0.0f,   1.0f, 1.0f, 0.0f,   0.0f, 0.0f
    };
    // Two triangles covering the quad.
    GLuint indices[6] =
    {
        0, 1, 2,
        0, 2, 3
    };
    VideoSource* videoSource = nullptr;
};
The implementation file, VideoWidget.cpp:
#include "VideoWidget.h"
#include "qsurfaceformat.h"
#include "qcolorspace.h"
#include "QtOpenGL\qopenglshaderprogram.h"
#include "qstring.h"
#include "qopengltexture.h"
#include "VideoSource.h"
#include "qdatetime.h"
#include "qtimer.h"
#include "qopenglpixeltransferoptions.h"
VideoWidget::VideoWidget(QWidget *parent)
: QOpenGLWidget(parent)
, shaderProgram(new QOpenGLShaderProgram())
, indexBuffer(QOpenGLBuffer::IndexBuffer)
{
ui.setupUi(this);
QTimer* timer = new QTimer();
connect(timer, &QTimer::timeout, this, &VideoWidget::Tick);
timer->start(20);
}
VideoWidget::~VideoWidget()
{
    makeCurrent(); // GL resources must be destroyed with the context current
    delete shaderProgram;
    delete vsShader;
    delete fsShader;
    delete textureY;
    delete textureU;
    delete textureV;
    vertexBuffer.destroy();
    indexBuffer.destroy();
    doneCurrent();
    delete videoSource;
}
void VideoWidget::initializeGL()
{
    initializeOpenGLFunctions();
    initializeShader();
    initializeProgram();
    initializeVertexData();
    initializeTexture();
    // Describe the interleaved vertex layout via the locations declared in
    // the vertex shader (0 = inPos, 1 = inColor, 2 = inTex).
    vertexBuffer.bind();
    shaderProgram->enableAttributeArray(0);
    shaderProgram->enableAttributeArray(1);
    shaderProgram->enableAttributeArray(2);
    shaderProgram->setAttributeBuffer(0, GL_FLOAT, 0, 3, 8 * sizeof(GLfloat));
    shaderProgram->setAttributeBuffer(1, GL_FLOAT, 3 * sizeof(GLfloat), 3, 8 * sizeof(GLfloat));
    shaderProgram->setAttributeBuffer(2, GL_FLOAT, 6 * sizeof(GLfloat), 2, 8 * sizeof(GLfloat));
    vertexBuffer.release();
}
void VideoWidget::paintGL()
{
    glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    if (textureY && textureY->isStorageAllocated())
    {
        shaderProgram->bind();
        // One texture unit per plane; the sampler uniforms select the units.
        shaderProgram->setUniformValue("texY", 0);
        textureY->bind(0);
        shaderProgram->setUniformValue("texU", 1);
        textureU->bind(1);
        shaderProgram->setUniformValue("texV", 2);
        textureV->bind(2);
        // Draw the quad through the index buffer (6 indices, two triangles).
        indexBuffer.bind();
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
        indexBuffer.release();
    }
}
void VideoWidget::resizeGL(int w, int h)
{
    // QOpenGLWidget updates the viewport itself; nothing is needed for a
    // full-window quad. See the sketch below for aspect-ratio handling.
}
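resizeGL is where aspect-ratio handling would go if the video should not stretch with the window. A sketch, assuming a hypothetical videoAspect value (frame width over height, which the original class does not track):

void VideoWidget::resizeGL(int w, int h)
{
    const float videoAspect = 4.0f / 3.0f; // hypothetical; derive from the AVFrame in practice
    int vw = w;
    int vh = int(w / videoAspect);
    if (vh > h)
    {
        vh = h;
        vw = int(h * videoAspect);
    }
    // Letterbox: center the viewport, leaving the clear color around it.
    glViewport((w - vw) / 2, (h - vh) / 2, vw, vh);
}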
void VideoWidget::initializeShader()
{
    // Pass-through vertex shader: forwards the color and texture coordinate.
    const char* vs = "#version 330 core\n"
        "layout(location = 0) in vec3 inPos;\n"
        "layout(location = 1) in vec3 inColor;\n"
        "layout(location = 2) in vec2 inTex;\n"
        "out vec3 Color;\n"
        "out vec2 Texcoord;\n"
        "void main() {\n"
        "    Color = inColor;\n"
        "    Texcoord = inTex;\n"
        "    gl_Position = vec4(inPos, 1.0);\n"
        "}\n";
    // Fragment shader: one sampler per plane, then a BT.601 limited-range
    // YUV-to-RGB conversion (the constants are explained after this listing).
    // texture() replaces texture2D(), which is not available in 330 core.
    const char* fs = "#version 330 core\n"
        "// YUV offset\n"
        "const vec3 offset = vec3(-0.0627451017, -0.501960814, -0.501960814);\n"
        "// RGB coefficients\n"
        "const vec3 Rcoeff = vec3(1.1644, 0.000, 1.596);\n"
        "const vec3 Gcoeff = vec3(1.1644, -0.3918, -0.813);\n"
        "const vec3 Bcoeff = vec3(1.1644, 2.0172, 0.000);\n"
        "in vec3 Color;\n"
        "in vec2 Texcoord;\n"
        "uniform sampler2D texY;\n"
        "uniform sampler2D texU;\n"
        "uniform sampler2D texV;\n"
        "out vec4 FragColor;\n"
        "void main() {\n"
        "    vec3 yuv;\n"
        "    yuv.x = texture(texY, Texcoord).r;\n"
        "    yuv.y = texture(texU, Texcoord).r;\n"
        "    yuv.z = texture(texV, Texcoord).r;\n"
        "    yuv += offset;\n"
        "    vec3 rgb;\n"
        "    rgb.r = dot(yuv, Rcoeff);\n"
        "    rgb.g = dot(yuv, Gcoeff);\n"
        "    rgb.b = dot(yuv, Bcoeff);\n"
        "    FragColor = vec4(rgb, 1.0);\n"
        "}\n";
    vsShader = new QOpenGLShader(QOpenGLShader::Vertex);
    if (!vsShader->compileSourceCode(vs))
    {
        // A bare `throw;` with no active exception calls std::terminate;
        // raise a real exception carrying the compile log instead.
        throw std::runtime_error(vsShader->log().toStdString());
    }
    fsShader = new QOpenGLShader(QOpenGLShader::Fragment);
    if (!fsShader->compileSourceCode(fs))
    {
        throw std::runtime_error(fsShader->log().toStdString());
    }
}
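The shader constants are the BT.601 limited-range ("studio swing") conversion, scaled to the [0, 1] values read from the textures: the offsets are -16/255 ≈ -0.0627 for Y and -128/255 ≈ -0.5020 for U and V, and 1.1644 = 255/219 rescales limited-range luma to full range, so for example the red channel works out to

R = 1.1644 * (Y - 16/255) + 1.5960 * (V - 128/255)

One caveat: AV_PIX_FMT_YUVJ422P is full-range, so strictly it needs slightly different constants; the shader treats both formats the same, at the cost of a small color inaccuracy.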
void VideoWidget::initializeProgram()
{
    if (vsShader->isCompiled() && fsShader->isCompiled())
    {
        shaderProgram->addShader(vsShader);
        shaderProgram->addShader(fsShader);
        // link() reports success directly; the log can be non-empty
        // (warnings) even when linking succeeds.
        if (shaderProgram->link())
        {
            shaderProgram->bind();
        }
        else
        {
            throw std::runtime_error(shaderProgram->log().toStdString());
        }
    }
}
void VideoWidget::initializeVertexData()
{
    vertexBuffer.create();
    vertexBuffer.bind();
    vertexBuffer.allocate(vertices, sizeof(vertices));
    vertexBuffer.release();
    indexBuffer.create();
    indexBuffer.bind();
    indexBuffer.allocate(indices, sizeof(indices));
    indexBuffer.release();
}
void VideoWidget::initializeTexture()
{
    videoSource = new VideoSource();
    // "vfwcap" selects the capture driver, "0" the first capture device.
    videoSource->initContext("vfwcap", "0");
    videoSource->openCamera();
    textureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
}
void VideoWidget::Tick()
{
    updateTexture(); // pull the next decoded frame into the textures
    update();        // schedule a repaint (paintGL)
}
void VideoWidget::updateTexture()
{
    AVFrame* avFrame = videoSource->getImageData();
    if (avFrame)
        updateTexture(AVPixelFormat(avFrame->format), avFrame->width, avFrame->height, avFrame->linesize, avFrame->data);
}
void VideoWidget::updateTexture(AVPixelFormat format, int width, int height, const int* lineSize, unsigned char* data[])
{
    // Planar 4:2:2: chroma is subsampled horizontally only, so the U and V
    // planes are (width / 2) x height.
    if (format == AV_PIX_FMT_YUV422P || format == AV_PIX_FMT_YUVJ422P)
    {
        updateTextureY(width, height, lineSize[0], data[0]);
        updateTextureU(width / 2, height, lineSize[1], data[1]);
        updateTextureV(width / 2, height, lineSize[2], data[2]);
    }
}
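Other pixel formats slot in as additional branches. For example, a hypothetical 4:2:0 branch (not implemented here) would halve the chroma planes in both dimensions:

// Sketch of a 4:2:0 branch for updateTexture(); chroma is subsampled
// horizontally and vertically (heights assume even dimensions).
else if (format == AV_PIX_FMT_YUV420P || format == AV_PIX_FMT_YUVJ420P)
{
    updateTextureY(width, height, lineSize[0], data[0]);
    updateTextureU(width / 2, height / 2, lineSize[1], data[1]);
    updateTextureV(width / 2, height / 2, lineSize[2], data[2]);
}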
void VideoWidget::updateTextureY(int width, int height, int lineSize, const unsigned char* data)
{
    if (!textureY->isCreated())
    {
        textureY->create();
        // Size the texture to the visible width, not the stride; row
        // padding is handled by setRowLength() below.
        textureY->setSize(width, height);
        textureY->setFormat(QOpenGLTexture::R8_UNorm);
        textureY->allocateStorage(QOpenGLTexture::Red, QOpenGLTexture::UInt8);
        textureY->setMinificationFilter(QOpenGLTexture::Nearest);
        textureY->setMagnificationFilter(QOpenGLTexture::Nearest);
        textureY->setWrapMode(QOpenGLTexture::ClampToEdge);
    }
    // Tell OpenGL the real length of each source row (the FFmpeg stride).
    QOpenGLPixelTransferOptions pto;
    pto.setRowLength(lineSize);
    textureY->setData(0, 0, 0, width, height, 0, 0, QOpenGLTexture::Red, QOpenGLTexture::UInt8, data, &pto);
}
void VideoWidget::updateTextureU(int width, int height, int lineSize, const unsigned char* data)
{
    if (!textureU->isCreated())
    {
        textureU->create();
        textureU->setSize(width, height);
        textureU->setFormat(QOpenGLTexture::R8_UNorm);
        textureU->allocateStorage(QOpenGLTexture::Red, QOpenGLTexture::UInt8);
        textureU->setMinificationFilter(QOpenGLTexture::Nearest);
        textureU->setMagnificationFilter(QOpenGLTexture::Nearest);
        textureU->setWrapMode(QOpenGLTexture::ClampToEdge);
    }
    QOpenGLPixelTransferOptions pto;
    pto.setRowLength(lineSize);
    textureU->setData(0, 0, 0, width, height, 0, 0, QOpenGLTexture::Red, QOpenGLTexture::UInt8, data, &pto);
}
void VideoWidget::updateTextureV(int width, int height, int lineSize, const unsigned char* data)
{
    if (!textureV->isCreated())
    {
        textureV->create();
        textureV->setSize(width, height);
        textureV->setFormat(QOpenGLTexture::R8_UNorm);
        textureV->allocateStorage(QOpenGLTexture::Red, QOpenGLTexture::UInt8);
        textureV->setMinificationFilter(QOpenGLTexture::Nearest);
        textureV->setMagnificationFilter(QOpenGLTexture::Nearest);
        textureV->setWrapMode(QOpenGLTexture::ClampToEdge);
    }
    QOpenGLPixelTransferOptions pto;
    pto.setRowLength(lineSize);
    textureV->setData(0, 0, 0, width, height, 0, 0, QOpenGLTexture::Red, QOpenGLTexture::UInt8, data, &pto);
}
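updateTextureY/U/V differ only in which texture they touch; a hypothetical shared helper (not in the original code) would collapse the duplication, with the three methods reduced to one-line calls such as updatePlane(textureY, width, height, lineSize, data):

// Sketch: one upload routine for any single-channel plane.
static void updatePlane(QOpenGLTexture* tex, int width, int height,
                        int lineSize, const unsigned char* data)
{
    if (!tex->isCreated())
    {
        tex->create();
        tex->setSize(width, height);
        tex->setFormat(QOpenGLTexture::R8_UNorm);
        tex->allocateStorage(QOpenGLTexture::Red, QOpenGLTexture::UInt8);
        tex->setMinificationFilter(QOpenGLTexture::Nearest);
        tex->setMagnificationFilter(QOpenGLTexture::Nearest);
        tex->setWrapMode(QOpenGLTexture::ClampToEdge);
    }
    QOpenGLPixelTransferOptions pto;
    pto.setRowLength(lineSize); // FFmpeg stride, may exceed the visible width
    tex->setData(0, 0, 0, width, height, 0, 0, QOpenGLTexture::Red,
                 QOpenGLTexture::UInt8, data, &pto);
}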
On this foundation the program can be extended: screen recording, for example (sketched below), or a networking module on top of which video chat can be built.
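For instance, screen capture can reuse VideoSource as-is by choosing FFmpeg's Windows screen-capture device (a sketch; note that gdigrab typically delivers packed BGRA rather than planar YUV, so updateTexture() would need a matching branch):

VideoSource screen;
screen.initContext("gdigrab", "desktop"); // gdigrab: FFmpeg's GDI screen grabber
screen.openCamera();
AVFrame* frame = screen.getImageData();   // typically AV_PIX_FMT_BGRA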