Jean-Yves Didier

ffmpeg compatible

# Project modified by ARCS1to2
unix: TEMPLATE = lib
win32: TEMPLATE = vclib
TARGET = arcsffmpeg
HEADERS = videoencoder.h
HEADERS+= ffmpeglib.h
HEADERS+= videodecoder.h
HEADERS+= samplefilter.h
HEADERS+= videofilter.h
HEADERS+= logofilter.h
HEADERS+= flipvertfilter.h
HEADERS+= textscreenfilter.h
HEADERS+= videoviewer.h
HEADERS+= videoglviewer.h
SOURCES = videoencoder.cpp
SOURCES+= ffmpeglib.cpp
SOURCES+= videodecoder.cpp
SOURCES+= samplefilter.cpp
SOURCES+= videofilter.cpp
SOURCES+= logofilter.cpp
SOURCES+= flipvertfilter.cpp
SOURCES+= textscreenfilter.cpp
SOURCES+= videoviewer.cpp
SOURCES+= videoglviewer.cpp
SOURCES+=
MOC_DIR = ./moc
OBJECTS_DIR = ./obj
CONFIG += qt thread release opengl
unix {
LIBS += -lavformat -lavcodec -lavutil
}
win32 {
INCLUDEPATH+=$$(ARCSDIR)/include
LIBS += -L$$(ARCSDIR)/lib -lavformat -lavcodec -lavutil -lswscale -lavdevice
CONFIG += dll exceptions
DEFINES += _CRT_SECURE_NO_DEPRECATE
}
win32: DLLDESTDIR = ../libs
unix: QMAKE_POST_LINK=mv *.so* ../libs
#The following line was inserted by qt3to4
QT += opengl qt3support
unix: TEMPLATE = lib
win32: TEMPLATE = vclib
TARGET = arcsffmpeg
HEADERS = videoencoder.h
HEADERS+= ffmpeglib.h
HEADERS+= videodecoder.h
HEADERS+= samplefilter.h
HEADERS+= videofilter.h
HEADERS+= logofilter.h
HEADERS+= flipvertfilter.h
HEADERS+= textscreenfilter.h
HEADERS+= videoviewer.h
HEADERS+= videoglviewer.h
SOURCES = videoencoder.cpp
SOURCES+= ffmpeglib.cpp
SOURCES+= videodecoder.cpp
SOURCES+= samplefilter.cpp
SOURCES+= videofilter.cpp
SOURCES+= logofilter.cpp
SOURCES+= flipvertfilter.cpp
SOURCES+= textscreenfilter.cpp
SOURCES+= videoviewer.cpp
SOURCES+= videoglviewer.cpp
SOURCES+=
MOC_DIR = ./moc
OBJECTS_DIR = ./obj
CONFIG += qt thread release opengl
unix {
LIBS += -lavformat -lavcodec -lavfilter -lavutil -lswscale -lavdevice
}
win32 {
INCLUDEPATH+=$$(ARCSDIR)/include
LIBS += -L$$(ARCSDIR)/lib -lavformat -lavcodec -lavfilter -lavutil -lswscale -lavdevice
CONFIG += dll exceptions
DEFINES += _CRT_SECURE_NO_DEPRECATE
}
win32: DLLDESTDIR = ../libs
unix: QMAKE_POST_LINK=mv *.so* ../libs
#The following line was inserted by qt3to4
QT += opengl
#qt3support
ALXFILE = libffmpeg.alx
......
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE QtCreatorProject>
<!-- Written by Qt Creator 2.4.1, 2012-05-03T19:01:54. -->
<!-- Written by Qt Creator 2.4.1, 2012-05-04T11:55:36. -->
<qtcreator>
<data>
<variable>ProjectExplorer.Project.ActiveTarget</variable>
......
@@ -2,10 +2,14 @@
#define __FFMPEGLIB_H__
// this is for ffmpeg v 0.10.2
extern "C" {
#define __STDC_CONSTANT_MACROS
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavfilter/avfilter.h>
}
class FFMPegLib
{
@@ -14,7 +18,7 @@ static void init()
{
if (!initialized)
{
av_codec_register_all();
avcodec_register_all();
av_register_all();
avfilter_register_all();
}
......
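The header above wraps the FFmpeg 0.10 C headers in an extern "C" block and registers codecs, formats and filters exactly once. Since only part of the class is visible in this hunk, here is a minimal self-contained sketch of that one-time registration pattern; the class name and the static guard flag are assumptions, not copied from the source.

extern "C" {
#define __STDC_CONSTANT_MACROS
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavfilter/avfilter.h>
}

// Sketch only: one-shot FFmpeg registration as suggested by the hunk above.
class FFMpegInitSketch
{
public:
    static void init()
    {
        static bool initialized = false;   // assumed guard flag
        if (!initialized)
        {
            avcodec_register_all();        // register every codec
            av_register_all();             // register muxers, demuxers and protocols
            avfilter_register_all();       // register libavfilter filters
            initialized = true;
        }
    }
};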
@@ -36,7 +36,7 @@ public slots:
/*! \brief Sets the image that will be displayed over the video stream.
* \param s Image file name
*/
void setImageFile(QString s) { img = QImage(s); if (img.depth() < 32) img = img.convertDepth(32); }
void setImageFile(QString s) { img = QImage(s); if (img.depth() < 32) img = img.convertToFormat(QImage::Format_RGB32); }
/*! \brief Sets the image that will be displayed over the video stream.
* \param i Image in a QImage format.
......
<application>
<libraries>
<library name="../libs/libcommon.so" />
<library name="../libs/libarcsffmpeg.so" />
</libraries>
<objects>
<object classname="TokenTrigger" persistent="true" id="tt" />
<object classname="VideoEncoder" persistent="true" id="ve" />
<object classname="SampleFilter" persistent="true" id="sf" />
<object classname="VideoDecoder" persistent="true" id="vd" />
</objects>
<sheets>
<sheet id="end" />
<sheet id="start" >
<preconnection>
<init value="sample.avi" type="string" slot="setFile(QString)" object="vd" />
<init value="test.avi" type="string" slot="setFile(QString)" object="ve" />
<init value="end" type="string" slot="setToken(QString)" object="tt" />
</preconnection>
<connection>
<wire objsource="vd" objdest="sf" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
<wire objsource="sf" objdest="ve" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
<wire objsource="vd" objdest="ve" signal="sendHeight(int)" slot="setHeight(int)" />
<wire objsource="vd" objdest="ve" signal="sendWidth(int)" slot="setWidth(int)" />
<wire objsource="vd" objdest="ve" signal="finished()" slot="close()" />
<wire objsource="vd" objdest="tt" signal="finished()" slot="tic()" />
</connection>
<postconnection>
<init value="" type="void" slot="initDecoder()" object="vd" />
<init value="" type="void" slot="initEncoder()" object="ve" />
<init value="" type="void" slot="start()" object="vd" />
</postconnection>
<tokensender object="tt" />
</sheet>
</sheets>
<statemachine terminal="end" initial="start" >
<transition token="end" stepA="start" stepB="end" />
</statemachine>
<application mode="gui">
<context>
<libraries>
<library path="../libs/libarcsffmpeg.so"/>
<library path="../libs/libcommon.so"/>
</libraries>
<components>
<component id="sf" type="SampleFilter"/>
<component id="vd" type="VideoDecoder"/>
<component id="tt" type="TokenTrigger"/>
<component id="ve" type="VideoEncoder"/>
<component id="__statemachine__" type="StateMachine">
<statemachine>
<first name="start"/>
<last name="end"/>
<transitions>
<transition source="start" destination="end" token="end"/>
</transitions>
</statemachine>
</component>
</components>
<constants/>
</context>
<processes>
<process controller="__statemachine__">
<sheet id="end">
<preconnections/>
<connections/>
<postconnections/>
</sheet>
<sheet id="start">
<preconnections>
<invoke destination="tt" slot="setToken(QString)" type="string">end</invoke>
<invoke destination="vd" slot="setFile(QString)" type="string">sample.avi</invoke>
<invoke destination="ve" slot="setFile(QString)" type="string">test.avi</invoke>
</preconnections>
<connections>
<link source="vd" signal="sendImage(int,int,char*)" destination="sf" slot="setImage(int,int,char*)"/>
<link source="sf" signal="sendImage(int,int,char*)" destination="ve" slot="setImage(int,int,char*)"/>
<link source="vd" signal="sendHeight(int)" destination="ve" slot="setHeight(int)"/>
<link source="vd" signal="sendWidth(int)" destination="ve" slot="setWidth(int)"/>
<link source="vd" signal="finished()" destination="ve" slot="close()"/>
<link source="vd" signal="finished()" destination="tt" slot="tic()"/>
<link source="tt" signal="sendToken(QString)" destination="__statemachine__" slot="setToken(QString)"/>
</connections>
<postconnections>
<invoke destination="vd" slot="initDecoder()" type="void"/>
<invoke destination="ve" slot="initEncoder()" type="void"/>
<invoke destination="vd" slot="start()" type="void"/>
</postconnections>
</sheet>
</process>
</processes>
</application>
......
@@ -19,14 +19,14 @@ void TextScreenFilter::applyFilter()
QFontMetrics fmt(fnt);
int ntimes = text.contains('\\') + 1;
int ntimes = text.count('\\') + 1;
int fontwidth=0;
h = h ;
QPixmap pic(getWidth(),h);
QPainter p(&pic);
p.setBackgroundColor(bgColor);
p.setBackground(bgColor);
p.setFont(fnt);
p.setPen(fgColor);
@@ -52,7 +52,7 @@ void TextScreenFilter::applyFilter()
}
p.end();
QImage image=pic.convertToImage().convertDepth(32);
QImage image=pic.toImage().convertToFormat(QImage::Format_RGB32);
for (int i=0; i < getWidth(); i++)
{
......
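The hunks above replace several Qt 3 drawing calls (setBackgroundColor(), convertToImage(), convertDepth()) with their Qt 4 equivalents. A short self-contained sketch of the resulting Qt 4 path, with illustrative names that are not taken from the source:

#include <QString>
#include <QPixmap>
#include <QPainter>
#include <QImage>
#include <QFont>
#include <QColor>

// Sketch: render text into a 32-bit QImage the Qt 4 way.
QImage renderTextSketch(const QString &text, int width, int height,
                        const QFont &fnt, const QColor &fgColor, const QColor &bgColor)
{
    QPixmap pic(width, height);
    pic.fill(bgColor);                      // fill explicitly; no auto background erase in Qt 4
    QPainter p(&pic);
    p.setBackground(bgColor);               // replaces Qt 3's setBackgroundColor()
    p.setFont(fnt);
    p.setPen(fgColor);
    p.drawText(pic.rect(), Qt::AlignLeft | Qt::AlignTop, text);
    p.end();
    // replaces convertToImage().convertDepth(32)
    return pic.toImage().convertToFormat(QImage::Format_RGB32);
}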
@@ -15,9 +15,7 @@ VideoDecoder::VideoDecoder(QObject* parent) : QObject(parent)
pFrame = NULL;
pCodecCtx = NULL;
pFormatCtx = NULL;
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
img_convert_ctx = NULL;
#endif
}
@@ -42,10 +40,8 @@ VideoDecoder::~VideoDecoder()
if (pFormatCtx)
av_close_input_file(pFormatCtx);
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
if (img_convert_ctx)
sws_freeContext(img_convert_ctx);
#endif
}
@@ -103,104 +99,12 @@ void VideoDecoder::initDecoder()
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
SWS_BICUBIC,NULL, NULL, NULL);
emit sendFramerate((float)pCodecCtx->time_base.num/(float)pCodecCtx->time_base.den);
emit sendWidth(pCodecCtx->width);
emit sendHeight(pCodecCtx->height);
emit sendTotalFrames(pFormatCtx->streams[videoStream]->duration*pCodecCtx->time_base.num/pCodecCtx->time_base.den/AV_TIME_BASE);
/*
Start of very old code
// Open the video file
if(av_open_input_file(&pFormatCtx, fileName.ascii(), NULL, 0, NULL)!=0)
return; // Failed to open the file
// Retrieve the stream information
if(av_find_stream_info(pFormatCtx)<0)
return; // Could not retrieve stream information
// Dump information about the video file
dump_format(pFormatCtx, 0, fileName, 0);
// Look for the first video stream
videoStream=-1;
for(int i=0; i<pFormatCtx->nb_streams; i++)
{
#if ( LIBAVFORMAT_VERSION_INT > 5000)
if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)
#else
if(pFormatCtx->streams[i]->codec.codec_type==CODEC_TYPE_VIDEO)
#endif
{
videoStream=i;
break;
}
}
if(videoStream==-1)
return; // No video stream found
// Get a pointer to the codec context for the video stream.
#if ( LIBAVFORMAT_VERSION_INT > 5000)
pCodecCtx=pFormatCtx->streams[videoStream]->codec;
#else
pCodecCtx=&pFormatCtx->streams[videoStream]->codec;
#endif
// Find the codec associated with the video stream
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
return; // Codec not found
// Open the codec
if(avcodec_open(pCodecCtx, pCodec)<0)
return; // Unable to open the codec
// Small hack to recover a sensible framerate when the reported value is absurd
#if ( LIBAVFORMAT_VERSION_INT > 5000)
if(pCodecCtx->time_base.num>1000 && pCodecCtx->time_base.den ==1)
pCodecCtx->time_base.den =1000;
#else
if(pCodecCtx->frame_rate>1000 && pCodecCtx->frame_rate_base==1)
pCodecCtx->frame_rate_base=1000;
#endif
// Allocate a video frame
pFrame=avcodec_alloc_frame();
// Allocate a structure for the RGB frames
pFrameRGB=avcodec_alloc_frame();
if(pFrameRGB==NULL)
return;
// Allocate the buffer needed to store the frames
buffer=(uint8_t*)malloc(sizeof(uint8_t) *
avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height));
// Assign the appropriate parts of the buffer to the RGB image planes
avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
pCodecCtx->width, pCodecCtx->height);
totalFrames = -1;
numFrames = 0;
std::cout << "[VDec] Succeeded in opening video file." << std::endl;
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
if (img_convert_ctx == NULL)
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
SWS_BICUBIC,NULL, NULL, NULL);
#endif
#if ( LIBAVFORMAT_VERSION_INT > 5000)
emit sendFramerate((float)pCodecCtx->time_base.num/(float)pCodecCtx->time_base.den);
#else
emit sendFramerate((float)pCodecCtx->frame_rate/(float)pCodecCtx->frame_rate_base);
#endif
emit sendWidth(pCodecCtx->width);
emit sendHeight(pCodecCtx->height);
#if ( LIBAVFORMAT_VERSION_INT > 5000)
emit sendTotalFrames(pFormatCtx->streams[videoStream]->duration*pCodecCtx->time_base.num/pCodecCtx->time_base.den/AV_TIME_BASE);
#else
emit sendTotalFrames(pFormatCtx->streams[videoStream]->duration*pCodecCtx->frame_rate/pCodecCtx->frame_rate_base/AV_TIME_BASE);
#endif
*/
}
@@ -215,14 +119,11 @@ void VideoDecoder::start()
started = true;
totalFrames = -1;
std::cout << "[VDec] timer is set to " << pCodecCtx->time_base.den / pCodecCtx->time_base.num << " ms" << std::endl;
if (threaded)
{
connect(&timer,SIGNAL(timeout()), this, SLOT(timeTrigger()));
#if ( LIBAVFORMAT_VERSION_INT > 5000)
timer.start(1000 / pCodecCtx->time_base.num);
#else
timer.start(1000 / pCodecCtx->frame_rate);
#endif
connect(&timer,SIGNAL(timeout()), this, SLOT(timeTrigger()));
timer.start(1000* pCodecCtx->time_base.num / pCodecCtx->time_base.den);
}
else
{
@@ -235,19 +136,14 @@ void VideoDecoder::start()
}
void VideoDecoder::seekFrame(long n)
{
#if ( LIBAVFORMAT_VERSION_INT > 5000)
long time = n * pCodecCtx->time_base.den * AV_TIME_BASE / pCodecCtx->time_base.num;
if (av_seek_frame(pFormatCtx,videoStream, (int64_t)time,0)>0)
#else
long time = n * pCodecCtx->frame_rate_base * AV_TIME_BASE / pCodecCtx->frame_rate;
if (av_seek_frame(pFormatCtx,videoStream, (int64_t)time)>0)
#endif
{
numFrames = n - 1;
queryImage();
}
}
{
long time = n * pCodecCtx->time_base.den * AV_TIME_BASE / pCodecCtx->time_base.num;
if (av_seek_frame(pFormatCtx,videoStream, (int64_t)time,0)>0)
{
numFrames = n - 1;
queryImage();
}
}
void VideoDecoder::queryImage()
@@ -265,11 +161,7 @@ void VideoDecoder::queryImage()
{
totalFrames = numFrames;
if (loop)
#if ( LIBAVFORMAT_VERSION_INT > 5000)
av_seek_frame(pFormatCtx,videoStream,0,0);
#else
av_seek_frame(pFormatCtx,videoStream,0);
#endif
else
{
numFrames = 0;
@@ -284,22 +176,15 @@ void VideoDecoder::queryImage()
//if(packet.stream_index==videoStream)
//{
// Decode the video frame
avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
packet.data, packet.size);
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
// Did we get a complete video frame?
if(frameFinished)
{
// Convert the frame to RGB format
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
#else
img_convert((AVPicture *)pFrameRGB, PIX_FMT_RGB24,
(AVPicture*)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width,
pCodecCtx->height);
#endif
}
//}
......
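The queryImage() hunk only shows the two calls that changed. For context, here is a sketch of the read/decode/convert sequence they belong to, written against the 0.10-era API; the parameter names mirror the members visible in the diff, and error handling is kept to a minimum.

extern "C" {
#define __STDC_CONSTANT_MACROS
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}

// Sketch of the per-frame path implied by the diff (not the actual method body).
static bool decodeNextFrame(AVFormatContext *pFormatCtx, AVCodecContext *pCodecCtx,
                            int videoStream, AVFrame *pFrame, AVFrame *pFrameRGB,
                            SwsContext *img_convert_ctx)
{
    AVPacket packet;
    int frameFinished = 0;
    if (av_read_frame(pFormatCtx, &packet) < 0)
        return false;                                // end of stream or read error
    if (packet.stream_index == videoStream)
    {
        // New-style decode call, replaces avcodec_decode_video().
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
        if (frameFinished)
        {
            // Convert from the codec's native pixel format to packed RGB24.
            sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
                      0, pCodecCtx->height,
                      pFrameRGB->data, pFrameRGB->linesize);
        }
    }
    av_free_packet(&packet);                         // release the packet's buffers
    return true;
}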
@@ -4,13 +4,14 @@
#include <QObject>
#include <QTimer>
#include <QMutex>
#include <ffmpeg/avcodec.h>
#include <ffmpeg/avformat.h>
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
#include <ffmpeg/swscale.h>
#endif
extern "C" {
#define __STDC_CONSTANT_MACROS
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}
/*! \defgroup video Video
* \brief A list of video processing tools to decode, filter and encode video.
@@ -115,8 +116,8 @@ AVFrame *pFrame;
AVFrame *pFrameRGB;
uint8_t *buffer;
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
struct SwsContext *img_convert_ctx;
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
struct SwsContext *img_convert_ctx;
#endif
QString fileName;
......
@@ -2,11 +2,16 @@
#define __VIDEOENCODER_H__
#include <QObject>
extern "C" {
#define __STDC_CONSTANT_MACROS
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libswscale/swscale.h>
}
/*! \brief A video encoder component.
*
@@ -70,16 +75,14 @@ AVFrame *picture, *tmp_picture;
uint8_t *video_outbuf;
int video_outbuf_size;
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
struct SwsContext *img_convert_ctx;
#endif
struct SwsContext *img_convert_ctx;
int width;
int height;
float framerate;
int bitrate;
int pixelFormat;
PixelFormat pixelFormat;
bool started;
bool closed;
......
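videoencoder.h now keeps an unconditional SwsContext pointer and a typed PixelFormat member. The encoder implementation is not shown in this excerpt, but a libswscale conversion from incoming RGB24 frames to the encoder's target pixel format would look roughly like the sketch below; the function and variable names are illustrative and do not come from the source.

extern "C" {
#define __STDC_CONSTANT_MACROS
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}

// Illustrative only: convert a packed RGB24 frame to the encoder's pixel format.
static void convertForEncoding(SwsContext *&ctx, int width, int height,
                               PixelFormat dstFormat,
                               AVFrame *rgbFrame, AVFrame *dstFrame)
{
    if (!ctx)
        ctx = sws_getContext(width, height, PIX_FMT_RGB24,
                             width, height, dstFormat,
                             SWS_BICUBIC, NULL, NULL, NULL);   // lazily create the scaler
    sws_scale(ctx, rgbFrame->data, rgbFrame->linesize,
              0, height, dstFrame->data, dstFrame->linesize);
}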
@@ -6,8 +6,8 @@
using namespace std;
VideoGLViewer::VideoGLViewer(QObject* obj, const char* name) :
QGLWidget(NULL, name)
VideoGLViewer::VideoGLViewer(QObject* obj) :
QGLWidget()
{
makeCurrent();
QGLFormat glfmt = context()->format();
......
@@ -2,7 +2,7 @@
void VideoViewer::setImage(int w,int h, char* buffer)
{
img = QImage(w,h,32);
img = QImage(w,h,QImage::Format_RGB32);
for (int j=0; j < h ; j++)
for (int i=0; i < w; i++)
@@ -10,5 +10,5 @@ void VideoViewer::setImage(int w,int h, char* buffer)
img.setPixel(i,j,qRgb(buffer[(j*w+i)*3],buffer[(j*w+i)*3+1],buffer[(j*w+i)*3+2]));
}
// update();
repaint(0,0,width(),height(),false);
repaint(0,0,width(),height());
}
......
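The setImage() slot above rebuilds the QImage pixel by pixel with setPixel(). A possible alternative, assuming Qt 4.4 or later (QImage::Format_RGB888) and a tightly packed buffer in R,G,B byte order, is to wrap the decoder's buffer directly; the helper below is hypothetical and not part of the source. The per-pixel loop in the diff remains the more portable choice for older Qt 4 releases.

#include <QImage>

// Hypothetical helper: wrap a packed RGB24 buffer instead of calling setPixel() per pixel.
QImage imageFromRgb24(const char *buffer, int w, int h)
{
    QImage wrapped(reinterpret_cast<const uchar*>(buffer), w, h, w * 3,
                   QImage::Format_RGB888);
    return wrapped.copy();   // copy() detaches the image from the decoder's buffer
}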
#ifndef __VIDEOVIEWER_H__
#define __VIDEOVIEWER_H__
#include <Q3Frame>
#include <QFrame>
#include <QImage>
#include <QPainter>
/*#include <qgl.h>
#include <GL/glu.h>
#include <GL/gl.h>*/
class VideoViewer : public Q3Frame
class VideoViewer : public QFrame
{
Q_OBJECT
public:
VideoViewer(QObject* parent=0) : //QGLWidget(NULL, name) {}
Q3Frame(NULL, name,Qt::WNoAutoErase ) {} //!< ARCS Constructor
QFrame( ) {} //!< ARCS Constructor
public slots:
void setImage(int w, int h, char* buffer);
@@ -25,11 +25,16 @@ protected:
/*void resizeGL(int width, int height)
{
glViewport(0,width,0,height);*/
void paintEvent(QPaintEvent *){
QPainter painter(this);
painter.drawImage(0,0,img);
}
void drawContents(QPainter* p) {
p->drawImage(0,0,img);
p->flush();
//p->flush();
}
private:
......
<application>
<defines/>
<libraries>
<library name="../libs/libarcsffmpeg.so" />
</libraries>
<objects>
<object classname="VideoViewer" persistent="true" id="vw" />
<object classname="VideoDecoder" persistent="true" id="vd" />
</objects>
<sheets>
<sheet id="start" >
<preconnection>
<init value="sample.avi" type="string" slot="setFile(QString)" object="vd" />
<init value="" type="void" slot="show()" object="vw" />
<init value="true" type="bool" slot="setThreaded(bool)" object="vd" />
</preconnection>
<connection>
<wire objsource="vd" objdest="vw" signal="sendHeight(int)" slot="setHeight(int)" />
<wire objsource="vd" objdest="vw" signal="sendWidth(int)" slot="setWidth(int)" />
<wire objsource="vd" objdest="vw" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
</connection>
<postconnection>
<init value="" type="void" slot="initDecoder()" object="vd" />
<init value="" type="void" slot="start()" object="vd" />
</postconnection>
</sheet>
</sheets>
<statemachine terminal="end" initial="start" />
<application mode="gui">
<context>
<libraries>
<library path="../libs/libarcsffmpeg.so"/>
</libraries>
<components>
<component id="vd" type="VideoDecoder"/>
<component id="vw" type="VideoViewer"/>
<component id="__statemachine__" type="StateMachine">
<statemachine>
<first name="start"/>
<last name="end"/>
<transitions/>
</statemachine>
</component>
</components>
<constants/>
</context>
<processes>
<process controller="__statemachine__">
<sheet id="start">
<preconnections>
<invoke destination="vd" slot="setFile(QString)" type="string">sample.avi</invoke>
<invoke destination="vw" slot="show()" type="void"/>
<invoke destination="vd" slot="setThreaded(bool)" type="bool">true</invoke>
</preconnections>
<connections>
<link source="vd" signal="sendHeight(int)" destination="vw" slot="setHeight(int)"/>
<link source="vd" signal="sendWidth(int)" destination="vw" slot="setWidth(int)"/>
<link source="vd" signal="sendImage(int,int,char*)" destination="vw" slot="setImage(int,int,char*)"/>
</connections>
<postconnections>
<invoke destination="vd" slot="initDecoder()" type="void"/>
<invoke destination="vd" slot="start()" type="void"/>
</postconnections>
</sheet>
</process>
</processes>
</application>
......