Jean-Yves Didier

premier dump ffmpeg

# Built against ffmpeg v0.8.7
# (fixed: this line was bare text, which is not valid qmake syntax)
# Project modified by ARCS1to2

unix: TEMPLATE = lib
win32: TEMPLATE = vclib

TARGET = arcsffmpeg

HEADERS = videoencoder.h
HEADERS+= ffmpeglib.h
HEADERS+= videodecoder.h
HEADERS+= samplefilter.h
HEADERS+= videofilter.h
HEADERS+= logofilter.h
HEADERS+= flipvertfilter.h
HEADERS+= textscreenfilter.h
HEADERS+= videoviewer.h
HEADERS+= videoglviewer.h

SOURCES = videoencoder.cpp
SOURCES+= ffmpeglib.cpp
SOURCES+= videodecoder.cpp
SOURCES+= samplefilter.cpp
SOURCES+= videofilter.cpp
SOURCES+= logofilter.cpp
SOURCES+= flipvertfilter.cpp
SOURCES+= textscreenfilter.cpp
SOURCES+= videoviewer.cpp
SOURCES+= videoglviewer.cpp
# (removed a stray empty "SOURCES+=" append that did nothing)

MOC_DIR = ./moc
OBJECTS_DIR = ./obj

CONFIG += qt thread release opengl

unix {
LIBS += -lavformat -lavcodec -lavutil
}

win32 {
INCLUDEPATH+=$$(ARCSDIR)/include
LIBS += -L$$(ARCSDIR)/lib -lavformat -lavcodec -lavutil -lswscale -lavdevice
CONFIG += dll exceptions
DEFINES += _CRT_SECURE_NO_DEPRECATE
}


win32: DLLDESTDIR = ../libs
unix: QMAKE_POST_LINK=mv *.so* ../libs
#The following line was inserted by qt3to4
QT += opengl qt3support


# ARCS library manifest: arcslibmaker generates alm_<base>.cpp from the
# .alx file and feeds it back into SOURCES via an extra compiler.
ALXFILE = libffmpeg.alx
OTHER_FILES += libffmpeg.alx
arcslibrary.output = alm_${QMAKE_FILE_BASE}.cpp
arcslibrary.input = ALXFILE
arcslibrary.commands = arcslibmaker ${QMAKE_FILE_NAME}
arcslibrary.variable_out = SOURCES
QMAKE_EXTRA_COMPILERS += arcslibrary
INCLUDEPATH += $$(ARCSDIR)/include
LIBS += -L$$(ARCSDIR)/lib -larcs
CONFIG += dll
This diff is collapsed. Click to expand it.
#include "ffmpeglib.h"

// Definition of the one-time initialization flag consulted by
// FFMPegLib::init() (declared static in ffmpeglib.h).
bool FFMPegLib::initialized = false;
1 +#ifndef __FFMPEGLIB_H__
2 +#define __FFMPEGLIB_H__
3 +
4 +// this is for ffmpeg v 0.10.2
5 +
6 +#include <libavformat/avformat.h>
7 +#include <libavcodec/avcodec.h>
8 +#include <libavfilter/avfilter.h>
9 +
10 +class FFMPegLib
11 +{
12 +public:
13 +static void init()
14 +{
15 + if (!initialized)
16 + {
17 + av_codec_register_all();
18 + av_register_all();
19 + avfilter_register_all();
20 + }
21 + initialized = true;
22 +}
23 +
24 +private :
25 +static bool initialized;
26 +
27 +};
28 +
29 +#endif
1 +#include "flipvertfilter.h"
2 +
3 +void FlipVertFilter::applyFilter()
4 +{
5 + for (int j=0; j < getHeight(); j++)
6 + {
7 + for (int i=0; i < getWidth(); i++)
8 + {
9 + imgOut(i,getHeight()-j-1,R, imgIn(i,j,R));
10 + imgOut(i,getHeight()-j-1,G, imgIn(i,j,G));
11 + imgOut(i,getHeight()-j-1,B, imgIn(i,j,B));
12 + }
13 + }
14 +}
#ifndef __FLIPVERTFILTER_H__
#define __FLIPVERTFILTER_H__

#include "videofilter.h"

/*! \brief Vertical Flip VideoFilter class.
 *
 * This filter flips vertically any image.
 * \author Jean-Yves Didier
 * \date November, the 24th, 2007
 * \ingroup video
 */




class FlipVertFilter : public VideoFilter
{
Q_OBJECT
 public:
FlipVertFilter(QObject* parent=0) : VideoFilter(parent) {} //!< ARCS Constructor (fixed malformed "//<!" Doxygen marker)

protected:
//! Writes the vertically mirrored input image into the output image.
void applyFilter();

};




#endif //__FLIPVERTFILTER_H__
1 +<library>
2 + <headers>
3 + <header name="videoencoder.h"/>
4 + <header name="videodecoder.h"/>
5 + <header name="samplefilter.h"/>
6 + <header name="logofilter.h"/>
7 + <header name="textscreenfilter.h"/>
8 + <header name="videoviewer.h"/>
9 + <header name="videoglviewer.h"/>
10 + <header name="flipvertfilter.h"/>
11 + </headers>
12 + <components>
13 + <component name="VideoEncoder"/>
14 + <component name="VideoDecoder"/>
15 + <component name="VideoViewer"/>
16 + <component name="VideoGLViewer"/>
17 + <component name="SampleFilter"/>
18 + <component name="LogoFilter"/>
19 + <component name="FlipVertFilter"/>
20 + <component name="TextScreenFilter"/>
21 + </components>
22 +</library>
1 +<library>
2 + <headers>
3 + <header name="videoencoder.h"/>
4 + <header name="videodecoder.h"/>
5 + <header name="samplefilter.h"/>
6 + <header name="logofilter.h"/>
7 + <header name="textscreenfilter.h"/>
8 + <header name="videoviewer.h"/>
9 + <header name="videoglviewer.h"/>
10 + <header name="flipvertfilter.h"/>
11 + </headers>
12 + <components>
13 + <component name="VideoEncoder"/>
14 + <component name="VideoDecoder"/>
15 + <component name="VideoViewer"/>
16 + <component name="VideoGLViewer"/>
17 + <component name="SampleFilter"/>
18 + <component name="LogoFilter"/>
19 + <component name="FlipVertFilter"/>
20 + <component name="TextScreenFilter"/>
21 + </components>
22 +</library>
<application>
  <libraries>
    <library name="../libs/libcommon.so" />
    <library name="../libs/libarcsffmpeg.so" />
  </libraries>
  <objects>
    <object classname="TokenTrigger" persistent="true" id="tt" />
    <object classname="VideoEncoder" persistent="true" id="ve" />
    <object classname="LogoFilter" persistent="true" id="sf" />
    <object classname="VideoDecoder" persistent="true" id="vd" />
  </objects>
  <sheets>
    <sheet id="end" />
    <sheet id="start" >
      <preconnection>
        <init value="sample.avi" type="string" slot="setFile(QString)" object="vd" />
        <init value="test.avi" type="string" slot="setFile(QString)" object="ve" />
        <!-- NOTE(review): 1400000 looks like a bitrate rather than a
             framerate; confirm what VideoEncoder::setFramerate(int) expects. -->
        <init value="1400000" type="int" slot="setFramerate(int)" object="ve" />
        <init value="100" type="int" slot="setXOffset(int)" object="sf"/>
        <init value="100" type="int" slot="setYOffset(int)" object="sf"/>
        <!-- NOTE(review): LogoFilter declares setImageFile(QString) for file
             names; its setImage slot takes a QImage. Confirm this slot name,
             otherwise the logo file is never loaded. -->
        <init value="lsc.png" type="string" slot="setImage(QString)" object="sf" />
        <init value="end" type="string" slot="setToken(QString)" object="tt" />
      </preconnection>
      <connection>
        <wire objsource="vd" objdest="sf" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
        <wire objsource="sf" objdest="ve" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
        <wire objsource="vd" objdest="ve" signal="sendHeight(int)" slot="setHeight(int)" />
        <wire objsource="vd" objdest="ve" signal="sendWidth(int)" slot="setWidth(int)" />
        <wire objsource="vd" objdest="ve" signal="sendFramerate(float)" slot="setFramerate(float)"/>
        <wire objsource="vd" objdest="ve" signal="finished()" slot="close()" />
        <wire objsource="vd" objdest="tt" signal="finished()" slot="tic()" />
      </connection>
      <postconnection>
        <init value="" type="void" slot="initDecoder()" object="vd" />
        <init value="" type="void" slot="initEncoder()" object="ve" />
        <init value="" type="void" slot="start()" object="vd" />
      </postconnection>
      <tokensender object="tt" />
    </sheet>
  </sheets>
  <statemachine terminal="end" initial="start" >
    <transition token="end" stepA="start" stepB="end" />
  </statemachine>
</application>
1 +#include "logofilter.h"
2 +
3 +
4 +void LogoFilter::applyFilter()
5 +{
6 + if (img.isNull())
7 + return ;
8 +
9 + duplicateImage();
10 +
11 +
12 + for (int i=0; i < img.width(); i++)
13 + {
14 + for (int j=0; j< img.height() ; j++)
15 + {
16 + if (safeIndex(xoffset()+i,yoffset()+j))
17 + {
18 + uchar* col = img.scanLine(j)+(i*4);
19 + uchar alpha = *(col+3);
20 + float r = ((float)alpha * (float)(*(col+2)) + (255.0-(float)alpha)*(unsigned char)imgIn(xoffset()+i,yoffset()+j,R))/256.0;
21 + float g = ((float)alpha * (float)(*(col+1)) + (255.0-(float)alpha)*(unsigned char)imgIn(xoffset()+i,yoffset()+j,G))/256.0;
22 + float b = ((float)alpha * (float)(*(col)) + (255.0-(float)alpha)*(unsigned char)imgIn(xoffset()+i,yoffset()+j,B))/256.0;
23 +
24 + imgOut(xoffset()+i, yoffset()+j, R, (int)r);
25 + imgOut(xoffset()+i, yoffset()+j, G, (int)g);
26 + imgOut(xoffset()+i, yoffset()+j, B, (int)b);
27 + }
28 + }
29 + }
30 +}
31 +
32 +
#ifndef __LOGOFILTER_H__
#define __LOGOFILTER_H__

#include "videofilter.h"
#include <QImage>

/*! \brief A VideoFilter that could be used to display images in video.
 *
 * This filter is useful to insert logos in videos for example.
 * If the selected image has an alpha channel, then the image would be blended
 * with the video stream.
 * \author Jean-Yves Didier
 * \date September, the 21st, 2006
 * \ingroup video
 */
class LogoFilter: public VideoFilter
{
Q_OBJECT
Q_PROPERTY(int xoffset READ xoffset WRITE setXOffset )
Q_PROPERTY(int yoffset READ yoffset WRITE setYOffset )
Q_PROPERTY(QImage image READ image WRITE setImage )
Q_CLASSINFO( "xoffset", "X Offset for placing logo")
Q_CLASSINFO( "yoffset", "Y Offset for placing logo")
Q_CLASSINFO( "image" , "Image to display on top of video")
Q_CLASSINFO( "author" , "Jean-Yves Didier")
Q_CLASSINFO( "description", "This filter inserts an image inside a video")

public:
LogoFilter(QObject* parent=0) : VideoFilter(parent) { setXOffset(0); setYOffset(0);} //!< ARCS Constructor
int xoffset() const {return xoff;} //!< Returns the x-offset of the displayed logo relative to the coordinates of image.
int yoffset() const {return yoff;} //!< Returns the y-offset of the displayed logo relative to the coordinates of image.
QImage image() const { return img; } //!< Returns the logo to display over video stream.


public slots:
/*! \brief Sets the image that will be displayed over the video stream.
 * \param s Image file name
 * \note The image is forced to 32-bit depth so applyFilter() can read
 *       B,G,R,A bytes directly from the scan lines.
 * NOTE(review): the application sheets (logo.xml) invoke a string slot
 * named setImage(QString); confirm they should call setImageFile(QString).
 */
 void setImageFile(QString s) { img = QImage(s); if (img.depth() < 32) img = img.convertDepth(32); }

/*! \brief Sets the image that will be displayed over the video stream.
 * \param i Image in a QImage format.
 */
void setImage(QImage i) { img = i; }
void applyFilter(); //!< Blends the logo over the current frame (see logofilter.cpp).
void setXOffset(int i) { xoff=i; } //!< Sets the x-offset of the displayed logo relative to the coordinates of image.
void setYOffset(int i) { yoff=i; } //!< Sets the y-offset of the displayed logo relative to the coordinates of image.

private:
QImage img;  // logo image, forced to 32-bit depth by setImageFile()
int xoff;    // horizontal placement offset in frame coordinates
int yoff;    // vertical placement offset in frame coordinates
};

#endif //__LOGOFILTER_H__
1 +<application>
2 + <libraries>
3 + <library name="../libs/libcommon.so" />
4 + <library name="../libs/libarcsffmpeg.so" />
5 + </libraries>
6 + <objects>
7 + <object classname="TokenTrigger" persistent="true" id="tt" />
8 + <object classname="VideoEncoder" persistent="true" id="ve" />
9 + <object classname="SampleFilter" persistent="true" id="sf" />
10 + <object classname="VideoDecoder" persistent="true" id="vd" />
11 + </objects>
12 + <sheets>
13 + <sheet id="end" />
14 + <sheet id="start" >
15 + <preconnection>
16 + <init value="sample.avi" type="string" slot="setFile(QString)" object="vd" />
17 + <init value="test.avi" type="string" slot="setFile(QString)" object="ve" />
18 + <init value="end" type="string" slot="setToken(QString)" object="tt" />
19 + </preconnection>
20 + <connection>
21 + <wire objsource="vd" objdest="sf" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
22 + <wire objsource="sf" objdest="ve" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
23 + <wire objsource="vd" objdest="ve" signal="sendHeight(int)" slot="setHeight(int)" />
24 + <wire objsource="vd" objdest="ve" signal="sendWidth(int)" slot="setWidth(int)" />
25 + <wire objsource="vd" objdest="ve" signal="finished()" slot="close()" />
26 + <wire objsource="vd" objdest="tt" signal="finished()" slot="tic()" />
27 + </connection>
28 + <postconnection>
29 + <init value="" type="void" slot="initDecoder()" object="vd" />
30 + <init value="" type="void" slot="initEncoder()" object="ve" />
31 + <init value="" type="void" slot="start()" object="vd" />
32 + </postconnection>
33 + <tokensender object="tt" />
34 + </sheet>
35 + </sheets>
36 + <statemachine terminal="end" initial="start" >
37 + <transition token="end" stepA="start" stepB="end" />
38 + </statemachine>
39 +</application>
1 +#include "samplefilter.h"
2 +
3 +void SampleFilter::applyFilter()
4 +{
5 + for (int j=0; j < getHeight() ; j++)
6 + {
7 + for (int i=0; i < getWidth(); i++)
8 + {
9 + int mean = (unsigned char)imgIn(i,j,R) + (unsigned char)imgIn(i,j,G) + (unsigned char)imgIn(i,j,B);
10 + mean = mean / 3;
11 + imgOut(i,j,R, mean);
12 + imgOut(i,j,G, mean);
13 + imgOut(i,j,B, mean);
14 + }
15 + }
16 +}
17 +
18 +
#ifndef __SAMPLEFILTER_H__
#define __SAMPLEFILTER_H__

#include "videofilter.h"

/*! \brief Simple VideoFilter class.
 *
 * This filter transforms color images in black'n white images by doing the mean of each R, G, B values.
 * \author Jean-Yves Didier
 * \date September, the 21st, 2006
 * \ingroup video
 */

class SampleFilter : public VideoFilter
{
Q_OBJECT
 public:
SampleFilter(QObject* parent=0) : VideoFilter(parent) {} //!< ARCS Constructor

protected:
//! Replaces each pixel by the mean of its R, G and B components.
void applyFilter();


};

#endif //__SAMPLEFILTER_H__
1 +<application>
2 + <libraries>
3 + <library name="../libs/libcommon.so" />
4 + <library name="../libs/libarcsffmpeg.so" />
5 + </libraries>
6 + <objects>
7 + <object classname="TokenTrigger" persistent="true" id="tt" />
8 + <object classname="VideoEncoder" persistent="true" id="ve" />
9 + <object classname="TextScreenFilter" persistent="true" id="sf" />
10 + <object classname="VideoDecoder" persistent="true" id="vd" />
11 + </objects>
12 + <sheets>
13 + <sheet id="end" />
14 + <sheet id="start" >
15 + <preconnection>
16 + <init value="sample.avi" type="string" slot="setFile(QString)" object="vd" />
17 + <init value="test.avi" type="string" slot="setFile(QString)" object="ve" />
18 + <!--init value="1400000" type="int" slot="setFramerate(int)" object="ve" /-->
19 + <!--init value="100" type="int" slot="setXOffset(int)" object="sf"/>
20 + <init value="100" type="int" slot="setYOffset(int)" object="sf"/>
21 + <init value="lsc.png" type="string" slot="setImage(QString)" object="sf" /-->
22 + <init value="Hello\\world !" type="string" slot="setText(QString)" object="sf"/>
23 + <init value="25" type="int" slot="setFrameDuration(int)" object="sf"/>
24 + <init value="10" type="int" slot="setStartingFrame(int)" object="sf"/>
25 + <init value="end" type="string" slot="setToken(QString)" object="tt" />
26 + </preconnection>
27 + <connection>
28 + <wire objsource="vd" objdest="sf" signal="sendFrameNumber(int)" slot="setNumFrame(int)" />
29 + <wire objsource="vd" objdest="sf" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
30 + <wire objsource="sf" objdest="ve" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
31 + <wire objsource="vd" objdest="ve" signal="sendHeight(int)" slot="setHeight(int)" />
32 + <wire objsource="vd" objdest="ve" signal="sendWidth(int)" slot="setWidth(int)" />
33 + <wire objsource="vd" objdest="ve" signal="sendFramerate(float)" slot="setFramerate(float)"/>
34 + <wire objsource="vd" objdest="ve" signal="finished()" slot="close()" />
35 + <wire objsource="vd" objdest="tt" signal="finished()" slot="tic()" />
36 + </connection>
37 + <postconnection>
38 + <init value="" type="void" slot="initDecoder()" object="vd" />
39 + <init value="" type="void" slot="initEncoder()" object="ve" />
40 + <init value="" type="void" slot="start()" object="vd" />
41 + </postconnection>
42 + <tokensender object="tt" />
43 + </sheet>
44 + </sheets>
45 + <statemachine terminal="end" initial="start" >
46 + <transition token="end" stepA="start" stepB="end" />
47 + </statemachine>
48 +</application>
#include "textscreenfilter.h"
#include <QPainter>
#include <QImage>
//Added by qt3to4:
#include <QPixmap>

/*! Renders the configured text as a full-screen "title card".
 *
 * When the current frame number equals startingFrame, the text (lines
 * separated by '\\') is drawn on a QPixmap the size of the frame, copied
 * into the output image, and then re-emitted (frameDuration - 1) extra
 * times so the card persists for frameDuration frames. All other frames
 * are passed through unchanged via duplicateImage().
 * Uses Qt3-compat painting APIs (setBackgroundColor, convertToImage),
 * hence the qt3support dependency in the .pro file.
 */
void TextScreenFilter::applyFilter()
{
    if (frameNumber() == startingFrame)
    {
        int l;
        int h = getHeight();

        // Start from a point size that would fit lineNumber lines, then
        // shrink until the actual line spacing fits.
        fnt.setPointSize(h/lineNumber);

        while (QFontMetrics(fnt).lineSpacing() > h/lineNumber)
            fnt.setPointSize(fnt.pointSize()-1);

        QFontMetrics fmt(fnt);

        // Number of text lines: one more than the number of '\' separators.
        int ntimes = text.contains('\\') + 1;
        int fontwidth=0;
        h = h ; // NOTE(review): no-op — looks like a leftover of an old computation

        QPixmap pic(getWidth(),h);
        QPainter p(&pic);
        p.setBackgroundColor(bgColor);
        p.setFont(fnt);
        p.setPen(fgColor);

        if (alignRight)
        {
            // "alignRight" mode: all lines share a left edge computed from
            // the widest line — i.e. a centred *block* alignment, not a
            // right alignment (see the \todo on the property in the header).
            for (l=0; l < ntimes ; l++)
            {
                QString section = text.section('\\',l,l);
                if (fontwidth < fmt.width(section))
                    fontwidth = fmt.width(section);
            }
            for (l=0; l < ntimes ; l++)
                p.drawText((getWidth()-fontwidth)/2,(lineNumber-ntimes+2*l)*h/(2*lineNumber), text.section('\\',l,l));

        }
        else
        {
            // Each line centred independently on its own width.
            for (l=0; l < ntimes ; l++)
            {
                fontwidth = fmt.width(text.section('\\',l,l));
                p.drawText((getWidth()-fontwidth)/2,(lineNumber-ntimes+2*l)*h/(2*lineNumber), text.section('\\',l,l));
            }
        }
        p.end();

        // Qt3-compat conversion to a 32-bit image for raw pixel access.
        QImage image=pic.convertToImage().convertDepth(32);

        // Copy the rendered card into the output frame; byte offsets
        // +2/+1/+0 pick R/G/B out of the 32-bit scan line.
        for (int i=0; i < getWidth(); i++)
        {
            for (int j=0; j < getHeight() ; j++)
            {
                imgOut(i,j,R, *((uchar*)image.scanLine(j) + (i*4) + 2));
                imgOut(i,j,G, *((uchar*)image.scanLine(j) + (i*4) + 1));
                imgOut(i,j,B, *((uchar*)image.scanLine(j) + (i*4) + 0));
            }

        }
        // Re-emit the same card for the rest of the configured duration.
        for (int k=0; k < (frameDuration-1); k++)
            emit sendImage(getWidth(),h,getImageOut());

    }
    else
        duplicateImage();
}
#ifndef __TEXTSCREENFILTER_H__
#define __TEXTSCREENFILTER_H__

#include "videofilter.h"
#include <QColor>
#include <QFont>


/*! \brief A VideoFilter designed to display text fillers in videos.
 *
 * \author Jean-Yves Didier
 * \ingroup video
 * \date September, the 21st, 2006
 */
class TextScreenFilter : public VideoFilter
{
Q_OBJECT
Q_CLASSINFO("backgroundColor","Color of the background of the text filler")
Q_CLASSINFO("foregroundColor","Color of the text filler")
Q_CLASSINFO("rightAlign", "Tells wether the text is to be aligned on the right or not.")
Q_CLASSINFO("font", "Sets the appropriate font for the text to display")
Q_PROPERTY( QColor backgroundColor READ backgroundColor WRITE setBackgroundColor)
Q_PROPERTY( QColor foregroundColor READ foregroundColor WRITE setForegroundColor)
Q_PROPERTY( bool rightAlign READ rightAlign WRITE setAlignRight)
Q_PROPERTY(QFont font READ font WRITE setFont )

public:
/*! \brief ARCS Constructor.
 *
 * Defaults: card rendered at frame 0, zero duration, 25 nominal text
 * lines per screen, per-line centring, white text on black background,
 * heavy sans-serif font.
 */
TextScreenFilter(QObject* parent=0) : VideoFilter(parent)
 {
 startingFrame = 0; frameDuration = 0; lineNumber=25; alignRight=false;
 bgColor = Qt::black;
 fgColor = Qt::white;
 fnt = QFont("Sans Serif");
 fnt.setStyleHint(QFont::SansSerif);
 fnt.setWeight(QFont::Black);
 }

QColor backgroundColor() const { return bgColor; } //!< Gives the background color
QColor foregroundColor() const { return fgColor; } //!< Gives the text color
bool rightAlign() const { return alignRight; } //!< \todo change this property name (it selects block alignment, see applyFilter())
QFont font() const { return fnt; } //!< Gives the font of the text

public slots:
 void setStartingFrame(int n) {startingFrame = n; } //!< Sets the starting frame \todo should be deprecated later
void setFrameDuration(int n) { frameDuration = n;} //!< Sets the frameduration of text filler
void setText(QString s) { text = s; } //!< Sets the text to display. Each line must be separated by a '\'.
void setFont(QFont f) { fnt = f; } //!< Sets the font of the text
void setLineNumber(int n) { lineNumber = n ;} //!< Sets the number of lines to display.
void setAlignRight(bool b) { alignRight = b; } //!< \todo to adjust
void setBackgroundColor(QColor col) { bgColor = col; } //!< Sets the background color
void setForegroundColor(QColor col) { fgColor = col; } //!< Sets the text color.

protected:
//! Renders the card when frameNumber() == startingFrame; see textscreenfilter.cpp.
void applyFilter();

private:
int startingFrame;  // frame index at which the text card is rendered
int frameDuration;  // number of frames the card stays on screen
int lineNumber;     // nominal number of text lines fitting the screen
bool alignRight;    // block-alignment flag (misnamed, see \todo)
QString text;       // text to display, lines separated by '\'
QColor bgColor;     // card background color
QColor fgColor;     // text color
QFont fnt;          // text font

};

#endif //__TEXTSCREENFILTER_H__
#include "videodecoder.h"
#include <iostream>
#include "ffmpeglib.h"

/*! \brief ARCS Constructor.
 *
 * Registers the ffmpeg libraries (once per process, via FFMPegLib::init())
 * and puts every pointer member into a known-empty state so that
 * initDecoder() and the destructor can test them safely.
 */
VideoDecoder::VideoDecoder(QObject* parent) : QObject(parent)
{
    FFMPegLib::init();
    //av_register_all();   // superseded by FFMPegLib::init()
    threaded = false;      // default: decode as fast as possible (see start())
    started = false;
    loop = false;

    buffer = NULL;
    pFrameRGB = NULL;
    pFrame = NULL;
    pCodecCtx = NULL;
    pFormatCtx = NULL;
    // swscale context only exists on newer ffmpeg (same guard as elsewhere)
    #if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
    img_convert_ctx = NULL;
#endif
}
22 +
23 +
/*! \brief ARCS Destructor.
 *
 * Releases every ffmpeg resource allocated by initDecoder(). Each pointer
 * is tested first since initDecoder() may have bailed out early or never
 * been called. (Comments translated from French.)
 */
VideoDecoder::~VideoDecoder()
{
    // Free the RGB image buffer
    if (buffer)
        free(buffer);

    if (pFrameRGB)
        av_free(pFrameRGB);

    // Free the YUV (native format) frame
    if (pFrame)
        av_free(pFrame);

    // Close the codec
    if (pCodecCtx)
        avcodec_close(pCodecCtx);

    // Close the video file
    if (pFormatCtx)
        av_close_input_file(pFormatCtx);

#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
    if (img_convert_ctx)
        sws_freeContext(img_convert_ctx);
#endif
}
50 +
51 +
52 +
53 +void VideoDecoder::initDecoder()
54 +{
55 + // opens video file
56 + if (avformat_open_input(&pFormatCtx, qPrintable(fileName), NULL,NULL) < 0)
57 + return ;
58 +
59 + // looks for information (warning: deprecated!)
60 + if (av_find_stream_info(pFormatCtx) < 0)
61 + return ;
62 + // dumps informations on the file
63 + av_dump_format(pFormatCtx,0,qPrintable(fileName),0);
64 +
65 + // look for video stream
66 + videoStream = av_find_best_stream(pFormatCtx,AVMEDIA_TYPE_VIDEO,-1,-1,&pCodec,0);
67 +
68 + if (videoStream < 0)
69 + return;
70 +
71 + //recovers the codec context
72 + pCodecCtx = pFormatCtx->streams[videoStream]->codec;
73 +
74 + if (avcodec_open(pCodecCtx,pCodec) < 0)
75 + return;
76 +
77 + // look for the correct framerate
78 + if(pCodecCtx->time_base.num>1000 && pCodecCtx->time_base.den ==1)
79 + pCodecCtx->time_base.den =1000;
80 +
81 + // allocate video image
82 + pFrame=avcodec_alloc_frame();
83 +
84 + // allocate resulting video image
85 + pFrameRGB=avcodec_alloc_frame();
86 + if(pFrameRGB==NULL)
87 + return;
88 +
89 + buffer=(uint8_t*)malloc(sizeof(uint8_t) *
90 + avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height));
91 +
92 + // Allocation des parts appropries du buffer pour les images RGB
93 + avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
94 + pCodecCtx->width, pCodecCtx->height);
95 +
96 + totalFrames = -1;
97 + numFrames = 0;
98 +
99 + std::cout << "[VDec] Succeeded in opening video file." << std::endl;
100 +
101 + if (img_convert_ctx == NULL)
102 + img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
103 + pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
104 + SWS_BICUBIC,NULL, NULL, NULL);
105 +
106 +
107 +
108 +/*
109 + Start of very old code
110 + // Ouverture du fichier video
111 + if(av_open_input_file(&pFormatCtx, fileName.ascii(), NULL, 0, NULL)!=0)
112 + return; // Erreur d'ouverture
113 +
114 + // Recupration des informations sur les flux
115 + if(av_find_stream_info(pFormatCtx)<0)
116 + return; // Impossible de rcuprer les informations
117 +
118 + // Dump des informations concernant le fichier video
119 + dump_format(pFormatCtx, 0, fileName, 0);
120 +
121 + // Recherche du premier flux video
122 + videoStream=-1;
123 + for(int i=0; i<pFormatCtx->nb_streams; i++)
124 + {
125 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
126 + if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)
127 +#else
128 + if(pFormatCtx->streams[i]->codec.codec_type==CODEC_TYPE_VIDEO)
129 +#endif
130 + {
131 + videoStream=i;
132 + break;
133 + }
134 + }
135 + if(videoStream==-1)
136 + return; // Pas de flux video trouv
137 +
138 + // Rcupre le pointeur sur un contexte de codec pour la video.
139 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
140 + pCodecCtx=pFormatCtx->streams[videoStream]->codec;
141 +#else
142 + pCodecCtx=&pFormatCtx->streams[videoStream]->codec;
143 +#endif
144 +
145 + // Trouve le codec associ au fichier video
146 + pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
147 + if(pCodec==NULL)
148 + return; // Codec introuvable
149 +
150 + // Ouvre le codec
151 + if(avcodec_open(pCodecCtx, pCodec)<0)
152 + return; // Imposible d'ouvrir le codec
153 +
154 + // Petit hack pour rcuprer le bon framerate dans le cas o on a quelquechose d'absurde
155 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
156 + if(pCodecCtx->time_base.num>1000 && pCodecCtx->time_base.den ==1)
157 + pCodecCtx->time_base.den =1000;
158 +#else
159 + if(pCodecCtx->frame_rate>1000 && pCodecCtx->frame_rate_base==1)
160 + pCodecCtx->frame_rate_base=1000;
161 +#endif
162 +
163 + // Allocation d'une image video
164 + pFrame=avcodec_alloc_frame();
165 +
166 + // Allocation d'une structure pour les images videos
167 + pFrameRGB=avcodec_alloc_frame();
168 + if(pFrameRGB==NULL)
169 + return;
170 +
171 + // Allocation du buffer ncessaire au stockage des images
172 + buffer=(uint8_t*)malloc(sizeof(uint8_t) *
173 + avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height));
174 +
175 + // Allocation des parts appropries du buffer pour les images RGB
176 + avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
177 + pCodecCtx->width, pCodecCtx->height);
178 +
179 + totalFrames = -1;
180 + numFrames = 0;
181 +
182 + std::cout << "[VDec] Succeeded in opening video file." << std::endl;
183 +
184 +#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
185 + if (img_convert_ctx == NULL)
186 + img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
187 + pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
188 + SWS_BICUBIC,NULL, NULL, NULL);
189 +#endif
190 +
191 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
192 + emit sendFramerate((float)pCodecCtx->time_base.num/(float)pCodecCtx->time_base.den);
193 +#else
194 + emit sendFramerate((float)pCodecCtx->frame_rate/(float)pCodecCtx->frame_rate_base);
195 +#endif
196 + emit sendWidth(pCodecCtx->width);
197 + emit sendHeight(pCodecCtx->height);
198 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
199 + emit sendTotalFrames(pFormatCtx->streams[videoStream]->duration*pCodecCtx->time_base.num/pCodecCtx->time_base.den/AV_TIME_BASE);
200 +#else
201 + emit sendTotalFrames(pFormatCtx->streams[videoStream]->duration*pCodecCtx->frame_rate/pCodecCtx->frame_rate_base/AV_TIME_BASE);
202 +#endif
203 +*/
204 +}
205 +
206 +
/*! \brief Starts the video decoding.
 *
 * Requires a successful initDecoder() (buffer != NULL). In threaded mode
 * a QTimer periodically triggers timeTrigger()/queryImage(); otherwise the
 * whole stream is decoded in a blocking loop right here.
 */
void VideoDecoder::start()
{
    if (buffer == NULL)
    {
        std::cerr << "[VDec] Cannot start video decoding" << std::endl;
        return;
    }

    started = true;
    totalFrames = -1;  // reset: queryImage() sets it at end of stream

    if (threaded)
    {
        connect(&timer,SIGNAL(timeout()), this, SLOT(timeTrigger()));
#if ( LIBAVFORMAT_VERSION_INT > 5000)
        // NOTE(review): interval uses time_base.num only — presumably meant
        // 1000 * num / den (milliseconds per frame); confirm on real files.
        timer.start(1000 / pCodecCtx->time_base.num);
#else
        timer.start(1000 / pCodecCtx->frame_rate);
#endif
    }
    else
    {
        // Blocking mode: decode until queryImage() detects end of stream
        // (totalFrames becomes != -1).
        while(totalFrames == -1)
        {
            queryImage();
        }
        std::cerr << "[VDec] End of reading." << std::endl;
    }
}
236 +
237 +void VideoDecoder::seekFrame(long n)
238 + {
239 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
240 + long time = n * pCodecCtx->time_base.den * AV_TIME_BASE / pCodecCtx->time_base.num;
241 + if (av_seek_frame(pFormatCtx,videoStream, (int64_t)time,0)>0)
242 +#else
243 + long time = n * pCodecCtx->frame_rate_base * AV_TIME_BASE / pCodecCtx->frame_rate;
244 + if (av_seek_frame(pFormatCtx,videoStream, (int64_t)time)>0)
245 +#endif
246 + {
247 + numFrames = n - 1;
248 + queryImage();
249 + }
250 + }
251 +
252 +
253 +void VideoDecoder::queryImage()
254 +{
255 + int frameFinished;
256 + AVPacket packet;
257 + packet.stream_index = ! videoStream ;
258 + if (numFrames%10 == 0)
259 + std::cout << "[VDec] reading frame " << numFrames << std::endl;
260 +
261 + while (packet.stream_index != videoStream)
262 + {
263 + // Dcodage d'une image du flux vido
264 + if (av_read_frame(pFormatCtx, &packet) < 0)
265 + {
266 + totalFrames = numFrames;
267 + if (loop)
268 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
269 + av_seek_frame(pFormatCtx,videoStream,0,0);
270 +#else
271 + av_seek_frame(pFormatCtx,videoStream,0);
272 +#endif
273 + else
274 + {
275 + numFrames = 0;
276 + //totalFrames = -1;
277 + emit finished();
278 + return;
279 + }
280 + }
281 + }
282 +
283 + // Vrification du paquet (est ce bien un paquet video ?)
284 + //if(packet.stream_index==videoStream)
285 + //{
286 + // Dcodage des images video
287 + avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
288 + packet.data, packet.size);
289 +
290 + // Est-ce que l'image video est rcupre ?
291 + if(frameFinished)
292 + {
293 + // Conversion de l'image au format RGB
294 +#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
295 + sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
296 + pFrameRGB->data, pFrameRGB->linesize);
297 +
298 +#else
299 + img_convert((AVPicture *)pFrameRGB, PIX_FMT_RGB24,
300 + (AVPicture*)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width,
301 + pCodecCtx->height);
302 +#endif
303 +
304 + }
305 + //}
306 +
307 + // Libre le paquet allou par av_read_frame
308 + av_free_packet(&packet);
309 + emit sendFrameNumber(numFrames);
310 +
311 + numFrames++;
312 +
313 + mutex.lock();
314 + emit sendImage(pCodecCtx->width, pCodecCtx->height, (char*)pFrameRGB->data[0]);
315 + mutex.unlock();
316 +}
317 +
318 +
319 +void VideoDecoder::timeTrigger()
320 +{
321 + if (!started)
322 + return;
323 + queryImage();
324 +}
325 +
326 +
327 +void VideoDecoder::stop()
328 +{
329 + started = false;
330 +
331 + if (buffer == NULL)
332 + {
333 + std::cerr << "[VDec] You should start video playing first." << std::endl;
334 + return;
335 + }
336 +
337 + if (threaded)
338 + timer.stop();
339 +
340 + numFrames = 0;
341 + totalFrames = -1;
342 +}
1 +#ifndef __VIDEODECODER_H__
2 +#define __VIDEODECODER_H__
3 +
4 +#include <QObject>
5 +#include <QTimer>
6 +#include <QMutex>
7 +#include <ffmpeg/avcodec.h>
8 +#include <ffmpeg/avformat.h>
9 +
10 +#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
11 +#include <ffmpeg/swscale.h>
12 +#endif
13 +
14 +
15 +/*! \defgroup video Video
16 + * \brief A list of video processing tools to decode, filter and encode video.
17 + *
18 + * This component is based on ffmpeg API. This is a tool to decode video files.
19 + *
20 + */
21 +
22 +
23 +/*! \brief A video decoder component.
24 + *
25 + * This component is relying on ffmpeg library to open and decode video files.
26 + * First, you will give a filename using setFile() then initialize the decoder
27 + * using initDecoder(). Once it is performed, you can either use start() or queryImage().
28 + *
29 + * \author Jean-Yves Didier
30 + * \date September, the 8th, 2006
31 + * \ingroup video
32 + */
class VideoDecoder : public QObject
{
Q_OBJECT
 public:
VideoDecoder(QObject* parent=0); //!< ARCS Constructor
~VideoDecoder(); //!< ARCS Destructor

public slots:
 /*! \brief Sets the video file name to decode.
  * This is intended for a use as a pre-connection initialization.
  * It doesn't perform anything by itself. However, it is needed by
  * initDecoder().
  * \param s The video file name.
  */
 void setFile(QString s) { fileName = s ; }
/*! \brief Starts the video decoding.
 *
 * The behaviour of this slot depends on whether setThreaded() was called or not.
 * By default, it will decode all the frames, without caring about the framerate of the video.
 * On the contrary, if setThreaded() was called and <tt>true</tt> passed as a parameter,
 * the images will be decoded at given times, taking the framerate into account.
 * Either way, this slot results in multiple calls of queryImage().
 * If setLoop() was enabled, the video will restart to play from the beginning.
 */
void start();

void seekFrame(long n); //!< Seeks the n-th frame in the video.

void stop(); //!< Stops the video decoding
/*! \brief Performs all the initializations needed to decode video.
 *
 * When called, this slot triggers several signals with some characteristics of the
 * decoded video. It triggers : sendFramerate(), sendWidth(), sendHeight(), sendTotalFrames().
 */
void initDecoder();
/*! \brief Decodes an image from the video stream.
 *
 * Once the image is decoded, the next call to this function will decode the next image
 * in the stream. This slot triggers sendFrameNumber() then sendImage() once per call.
 * Once images are not available
 * anymore, the call will result in a finished() signal.
 */
void queryImage();
/*! \brief Tells whether the image decoding should be threaded or not.
 *
 * \param b Set it to <tt>true</tt> if you want a threaded behaviour, <tt>false</tt> otherwise.
 * \sa start()
 */
void setThreaded(bool b) { threaded = b ;}
/*! \brief Tells whether the video decoding should be looped or not.
 *
 * \param b Set it to <tt>true</tt> if you want a looped behaviour, <tt>false</tt> otherwise.
 * \sa start()
 */
void setLoop(bool b) { loop = b; }

protected slots:
 void timeTrigger(); //!< Timer slot used to pace decoding when threaded mode is on.

signals:
/*! \brief Sends a decoded image from the video stream.
 *
 * The image is in RGB24 format.
 * \param w width of the image,
 * \param h height of the image,
 * \param buffer image buffer in RGB24 pixel packing.
 * \sa queryImage()
 */
void sendImage(int w,int h, char* buffer);
void sendWidth(int); //!< Sends the video width.
void sendHeight(int);//!< Sends the video height.
void sendFramerate(float); //!< Sends the video framerate
void sendFrameNumber(long);//!< Sends the number of the current decoded frame.
void sendTotalFrames(long);//!< Sends the total number of frames stored in video if available.
void finished(); //!< Tells when the video has been entirely decoded

private:
AVFormatContext *pFormatCtx;   //!< Demuxing context of the opened file.
int videoStream;               //!< Index of the decoded video stream in the file.
AVCodecContext *pCodecCtx;     //!< Decoder context of the video stream.
AVCodec *pCodec;               //!< Codec used to decode the stream.
AVFrame *pFrame;               //!< Decoded frame, codec native pixel format.
AVFrame *pFrameRGB;            //!< Frame converted to RGB24 for sendImage().
uint8_t *buffer;               //!< Pixel storage backing pFrameRGB.

#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
 struct SwsContext *img_convert_ctx; //!< swscale conversion context (newer ffmpeg only).
#endif

QString fileName;  //!< File to decode, set through setFile().
QTimer timer;      //!< Paces decoding in threaded mode.
QMutex mutex;      //!< Guards decoding against concurrent timer ticks.
bool threaded;     //!< Threaded (timer-paced) decoding flag, see setThreaded().
bool started;      //!< Set while decoding is running.
bool loop;         //!< Restart from the beginning at end of stream, see setLoop().

long numFrames;    //!< Current frame counter.
long totalFrames;  //!< Total number of frames, when the container provides it.
};
132 +
133 +#endif //__VIDEODECODER_H__
1 +#include "videoencoder.h"
2 +#include <iostream>
3 +#include <stdio.h>
4 +#include "ffmpeglib.h"
5 +//#include <string.h>
6 +
7 +
8 +VideoEncoder::VideoEncoder(QObject* parent) : QObject(parent)
9 +{
10 + FFMPegLib::init();
11 +// av_register_all();
12 + width = 320;
13 + height = 240;
14 + framerate=25;
15 + bitrate = 900000;
16 +
17 + closed = true;
18 + started = false;
19 +
20 + pixelFormat = PIX_FMT_RGB24 ;
21 +
22 +#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
23 + img_convert_ctx = NULL;
24 +#endif
25 +}
26 +
27 +VideoEncoder::~VideoEncoder()
28 +{
29 + if (!closed)
30 + close();
31 +}
32 +
33 +
/*! \brief Creates the video stream of the output container and configures its codec context.
 *
 * Sets bitrate, frame size, timebase/framerate and a few codec specific tweaks.
 * \param oc output (muxer) context, already associated with an AVOutputFormat.
 * \param codec_id identifier of the video codec to use for the stream.
 * \return the newly created stream, or NULL on failure.
 */
AVStream* VideoEncoder::addVideoStream(AVFormatContext *oc, CodecID codec_id)
{
    AVCodecContext *c;
    AVStream *st= NULL;

    st = av_new_stream(oc, 0);
    if (st == NULL)
        return st;
    /* the codec context moved from an embedded struct to a pointer in newer libavformat */
#if ( LIBAVFORMAT_VERSION_INT > 5000)
    c = st->codec;
#else
    c = &st->codec;
#endif
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_VIDEO;

    /* put sample parameters */
    c->bit_rate = bitrate;
    //c->bit_rate_tolerance = 100000;
    /* resolution must be a multiple of two */
    c->width = width;
    c->height = height;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. for fixed-fps content,
       timebase should be 1/framerate and timestamp increments should be
       identically 1. */
    /* non-integral framerate: switch to a millisecond-based timebase.
       NOTE(review): the denominator is 1000*(int)framerate, which drops the
       fractional part (29.97 -> 29000/1000 = 29 fps); (int)(framerate*1000)
       was probably intended — confirm before relying on fractional rates. */
    if ((int)(framerate*1000)/1000 != framerate)
    {
#if ( LIBAVFORMAT_VERSION_INT > 5000)
        c->time_base.num = 1000;
        c->time_base.den = 1000*(int)framerate;
#else
        c->frame_rate = 1000*(int)framerate;
        c->frame_rate_base = 1000;
#endif
    }
    else
    {
#if ( LIBAVFORMAT_VERSION_INT > 5000)
        c->time_base.num = 1;
        c->time_base.den = framerate;
#else
        c->frame_rate = framerate;
        c->frame_rate_base = 1;
#endif
    }
    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
    c->pix_fmt = PIX_FMT_YUV420P;//STREAM_PIX_FMT;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO){
        /* needed to avoid using macroblocks in which some coeffs overflow
           this doesnt happen with normal video, it just happens here as the
           motion of the chroma plane doesnt match the luma plane */
        c->mb_decision=2;
    }
    // some formats want stream headers to be separate
    if(!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    return st;
}
98 +
99 +AVFrame *VideoEncoder::allocPicture(int pix_fmt, int width, int height)
100 +{
101 + AVFrame *picture;
102 + uint8_t *picture_buf;
103 + int size;
104 +
105 + picture = avcodec_alloc_frame();
106 + if (!picture)
107 + return NULL;
108 + size = avpicture_get_size(pix_fmt, width, height);
109 + picture_buf = (uint8_t*)malloc(size);
110 + if (!picture_buf) {
111 + av_free(picture);
112 + return NULL;
113 + }
114 + avpicture_fill((AVPicture *)picture, picture_buf,
115 + pix_fmt, width, height);
116 + return picture;
117 +}
118 +
119 +
120 +
121 +int VideoEncoder::openVideo(AVFormatContext *oc, AVStream *st)
122 +{
123 + AVCodec *codec;
124 + AVCodecContext *c;
125 +
126 +#if ( LIBAVFORMAT_VERSION_INT > 5000)
127 + c = st->codec;
128 +#else
129 + c = &st->codec;
130 +#endif
131 +
132 + /* find the video encoder */
133 + codec = avcodec_find_encoder(c->codec_id);
134 + if (!codec)
135 + {
136 + std::cerr << "Couldn't find codec" << std::endl;
137 + return -1;
138 + }
139 +
140 + /* open the codec */
141 + if (avcodec_open(c, codec) < 0)
142 + {
143 + std::cerr << "Couldn't open codec" << std::endl;
144 + return -1;
145 + }
146 +
147 + video_outbuf = NULL;
148 + if (!(oc->oformat->flags & AVFMT_RAWPICTURE))
149 + {
150 + /* allocate output buffer */
151 + /* XXX: API change will be done */
152 + video_outbuf_size = 200000;
153 + video_outbuf = (uint8_t*)malloc(video_outbuf_size);
154 + }
155 +
156 + /* allocate the encoded raw picture */
157 + picture = allocPicture(c->pix_fmt, c->width, c->height);
158 + if (!picture)
159 + {
160 + std::cerr << "Cannot allocate picture " << std::endl;
161 + return -1;
162 + }
163 +
164 + /* if the output format is not YUV420P, then a temporary YUV420P
165 + picture is needed too. It is then converted to the required
166 + output format */
167 + tmp_picture = NULL;
168 + tmp_picture = allocPicture(pixelFormat, c->width, c->height);
169 + if (tmp_picture== NULL)
170 + {
171 + std::cout << " Cannot allocate tmp_picture" << std::endl;
172 + return -1;
173 + }
174 +
175 + // creating sws context
176 +#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
177 + if (img_convert_ctx == NULL)
178 + img_convert_ctx = sws_getContext(c->width, c->height,
179 + c->pix_fmt, c->width, c->height, pixelFormat,
180 + SWS_BICUBIC,NULL, NULL, NULL);
181 +#endif
182 +
183 +
184 + return 0;
185 +}
186 +
/*! \brief Creates the muxer, the video stream and opens the output file.
 *
 * Requires setFile() to have been called first. Guesses the container format
 * from the file extension (falling back on MPEG), creates the video stream,
 * opens the codec through openVideo() and writes the container header.
 * On success the encoder accepts frames through setImage().
 */
void VideoEncoder::initEncoder()
{
    if (fileName.isEmpty())
        return ;
    std::cout << "[VEnc] File name defined." << std::endl;
    // guess the container format from the output file name
    fmt = guess_format(NULL, fileName.ascii(), NULL);
    std::cout << "[VEnc] File name " << fileName.ascii() << std::endl;

    if (fmt == NULL)
    {
        // unknown extension: default to MPEG
        fmt = guess_format("mpeg", NULL, NULL);
        if (fmt == NULL)
        {
            std::cerr << "[VEnc] Could not guess format" << std::endl;
            return;
        }
    }
    std::cout << "[VEnc] Format properly guessed." << std::endl;
    // override the AVI default codec: MSMPEG4v2 output is widely playable
    if (QString(fmt->extensions) == "avi")
        fmt->video_codec = CODEC_ID_MSMPEG4V2 ;


    oc = av_alloc_format_context();
    if (oc == NULL)
        return;
    oc->oformat = fmt;

#ifdef WIN32
    _snprintf(oc->filename, sizeof(oc->filename), "%s", fileName.ascii());
#else
    snprintf(oc->filename, sizeof(oc->filename), "%s", fileName.ascii());
#endif


    video_st = NULL;

    if (fmt->video_codec != CODEC_ID_NONE)
        video_st = addVideoStream(oc, fmt->video_codec);

    if (video_st == NULL)
    {
        std::cerr << "Failed to create video stream." << std::endl;
        return;
    }

    if (oc == NULL)
    {
        // NOTE(review): unreachable (oc was already checked above); also the
        // message lost its accents ("Alerte générale") in a past re-encoding.
        std::cerr << "Alerte gnrale" << std::endl;
    }

    if (av_set_parameters(oc, NULL) < 0)
        return ;

    dump_format(oc, 0, fileName.ascii(), 1);
    if ( openVideo(oc, video_st) != 0)
    {
        std::cerr << "Couldn't open video" << std::endl;
        return ;
    }
    // open the actual output file, unless the format needs none
    if (!(fmt->flags & AVFMT_NOFILE))
    {
        if (url_fopen(&oc->pb, fileName.ascii(), URL_WRONLY))
            return ;
    }

    av_write_header(oc);
    started = true;
    closed = false;
    std::cout << "[VEnc] Encoder properly initialized." << std::endl;
}
257 +
258 +
/*! \brief Converts and encodes one frame into the output file.
 *
 * Ignored unless initEncoder() succeeded and the frame matches the configured
 * geometry. The input is copied into tmp_picture, converted to the codec
 * pixel format, then either written raw or encoded and muxed.
 * \param w frame width (must equal the configured width),
 * \param h frame height (must equal the configured height),
 * \param buffer raw frame data in pixelFormat (RGB24 by default).
 */
void VideoEncoder::setImage(int w, int h, char* buffer)
{
    // initEncoder() must have succeeded first
    if (!started)
        return;

    // frames of an unexpected size are silently dropped
    if ((w!=width)||(h!=height))
        return;

    int out_size, ret;
    AVCodecContext *c;

#if ( LIBAVFORMAT_VERSION_INT > 5000)
    c = video_st->codec;
#else
    c = &video_st->codec;
#endif

    // copy caller data into tmp_picture: 3 bytes/pixel for RGB24,
    // 4 bytes/pixel for RGB32 (when setRGBA() was used)
    if (pixelFormat == PIX_FMT_RGB24)
        memcpy( tmp_picture->data[0], (uint8_t*)buffer, width*height*3);
    else
        memcpy( tmp_picture->data[0], (uint8_t*)buffer, width*height*4);

// tmp_picture->data[0] = (uint8_t*)buffer;

    // convert the input frame to the codec pixel format
#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
    sws_scale(img_convert_ctx, tmp_picture->data, tmp_picture->linesize, 0, c->height,
              picture->data, picture->linesize);

#else
    img_convert((AVPicture *)picture, c->pix_fmt,
                (AVPicture *)tmp_picture, pixelFormat,
                c->width, c->height);
#endif

    if (oc->oformat->flags & AVFMT_RAWPICTURE)
    {
        /* raw video case. The API will change slightly in the near
           futur for that */
        AVPacket pkt;
        av_init_packet(&pkt);

        pkt.flags |= PKT_FLAG_KEY;
        pkt.stream_index= video_st->index;
        pkt.data= (uint8_t *)picture;
        pkt.size= sizeof(AVPicture);

        ret = av_write_frame(oc, &pkt);
    }
    else
    {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0)
        {
            AVPacket pkt;
            av_init_packet(&pkt);

            pkt.pts= c->coded_frame->pts;//av_rescale_q(c->coded_frame->pts, c->frame_rate_base, video_st->frame_rate_base);
            if(c->coded_frame->key_frame)
                pkt.flags |= PKT_FLAG_KEY;
            pkt.stream_index= video_st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;

            /* write the compressed frame in the media file */
            ret = av_write_frame(oc, &pkt);
        }
        else
        {
            ret = 0;
        }
    }

    // NOTE(review): write errors are silently ignored here — no signal or
    // log reports a failed av_write_frame; confirm this is intentional.
    if (ret != 0)
        return ;
}
336 +
337 +
338 +
339 +
/*! \brief Finishes the encoding: closes the codec, writes the trailer and releases everything.
 *
 * Writing the trailer is mandatory — it stores the index information needed
 * to seek in the produced file. Safe to call more than once (no-op when
 * already closed).
 */
void VideoEncoder::close()
{
    if (closed)
        return;
    /* the codec context moved to a pointer in newer libavformat */
#if ( LIBAVFORMAT_VERSION_INT > 5000)
    avcodec_close(video_st->codec);
#else
    avcodec_close(&video_st->codec);
#endif
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture)
    {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);

    av_write_trailer(oc);

    // NOTE(review): only the AVStream structs are freed; on newer ffmpeg the
    // codec contexts pointed to by streams[i]->codec appear to leak — confirm.
    for(int i = 0; i < oc->nb_streams; i++)
        av_freep(&oc->streams[i]);

    if (!(fmt->flags & AVFMT_NOFILE))
        /* oc->pb became a pointer in libavformat >= 52 */
#if ( LIBAVFORMAT_VERSION_INT >= 52<<16 )
        url_fclose(oc->pb);
#else
        url_fclose(&oc->pb);
#endif

    av_free(oc);

#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
    if (img_convert_ctx)
        sws_freeContext(img_convert_ctx);
#endif

    closed = true;
}
1 +#ifndef __VIDEOENCODER_H__
2 +#define __VIDEOENCODER_H__
3 +
4 +#include <QObject>
5 +#include <libavcodec/avcodec.h>
6 +#include <libavformat/avformat.h>
7 +
8 +#include <libswscale/swscale.h>
9 +
10 +
/*! \brief A video encoder component.
 *
 * This component is relying on ffmpeg library to open and encode video files.
 * First, you will give a filename using setFile() then initialize the encoder
 * using initEncoder(). Once it is performed, you can feed the encoder with frames
 * by the slot setImage(). Once all the frames have been sent, you can close() the
 * video stream.
 *
 * \author Jean-Yves Didier
 * \date September, the 8th, 2006
 * \ingroup video
 */

class VideoEncoder : public QObject
{
Q_OBJECT
 public:
VideoEncoder(QObject* parent=0); //!< ARCS Constructor
~VideoEncoder(); //!< ARCS Destructor (closes the stream if still open)

public slots:
 void setFile(QString s) {fileName = s; } //!< Sets the video file in which to encode.
void initEncoder(); //!< Inits the encoder and prepare the video.
/*! \brief Sets the image to encode inside video stream.
 *
 * The image must be packed in RGB 24 format (or RGB 32 after setRGBA()).
 * \param w video width
 * \param h video height
 * \param buffer image buffer in RGB 24 format.
 */
void setImage(int w, int h, char* buffer);
/*! \brief Close the video and write down the video file.
 *
 * This step is necessary: when closed, index informations are written down.
 * You will need this later if you want to be able to seek frames in the video.
 */
void close();
void setWidth(int w) { width=w; } //!< Sets the video width. Should be called before initEncoder()
void setHeight(int h) { height=h;}//!< Sets the video height. Should be called before initEncoder()
void setFramerate(float f) { framerate=f; }//!< Sets the framerate. Should be called before initEncoder()
void setBitrate(int b) { bitrate = b; } //!< Sets the video bitrate. Should be called before initEncoder()
/*! \brief Set the input image format to RGB32
 *
 * By default, this encoder is working with RGB24 images.
 * You must call this before initEncoder()
 */
void setRGBA() { pixelFormat = PIX_FMT_RGB32 ;}


private:
AVStream* addVideoStream(AVFormatContext *oc, CodecID codec_id); //!< Creates and configures the output video stream.
AVFrame *allocPicture(int pix_fmt, int width, int height); //!< Allocates a frame plus its pixel buffer.

int openVideo(AVFormatContext *oc, AVStream *st); //!< Opens the codec and allocates working frames.
QString fileName;        //!< Output file, set through setFile().
AVOutputFormat *fmt;     //!< Guessed container format.
AVFormatContext *oc;     //!< Muxer context.
AVStream* video_st;      //!< The single video stream of the output.
AVFrame *picture, *tmp_picture; //!< Codec-format frame / input-format frame.
uint8_t *video_outbuf;   //!< Compressed data buffer.
int video_outbuf_size;   //!< Size of video_outbuf in bytes.

#if ( LIBAVFORMAT_VERSION_INT > 52<<16 )
 struct SwsContext *img_convert_ctx; //!< Input-to-codec pixel format converter (newer ffmpeg only).
#endif

int width;       //!< Video width, see setWidth().
int height;      //!< Video height, see setHeight().
float framerate; //!< Video framerate, see setFramerate().
int bitrate;     //!< Video bitrate, see setBitrate().

int pixelFormat; //!< Input pixel format: PIX_FMT_RGB24 or PIX_FMT_RGB32.

bool started;    //!< True once initEncoder() succeeded.
bool closed;     //!< True when no stream is open.
};
87 +
88 +#endif //__VIDEOENCODER_H__
1 +#include "videofilter.h"
2 +
3 +void VideoFilter::setImage(int w, int h, char* img)
4 +{
5 + imageIn = img;
6 + width = w;
7 + height = h;
8 +
9 + if ( previousSize != width*height*3 )
10 + {
11 + if (imageOut != NULL)
12 + delete[] imageOut;
13 + imageOut = new char[width*height*3];
14 + previousSize = width*height*3 ;
15 + }
16 +
17 + applyFilter();
18 +
19 +
20 + emit sendImage(width,height,imageOut);
21 +}
1 +#ifndef __VIDEOFILTER_H__
2 +#define __VIDEOFILTER_H__
3 +
4 +#include <QObject>
5 +
/*! \brief A simple video filter empty shell to develop many video filters
 *
 * All video filters may inherit from this class.
 * In each child, applyFilter() needs to be reimplemented.
 * In some special cases, it may also be the case of setImage().
 *
 * We recommend to look at the other developed filters.
 * At the same time we may add some recommendations about meta-data to append to each filter.
 *
 * \author Jean-Yves Didier
 * \date September, the 8th, 2006
 * \ingroup video
 */
class VideoFilter : public QObject
{
Q_OBJECT
public:
VideoFilter(QObject* parent=0) : QObject(parent) { imageIn = NULL; imageOut = NULL; previousSize = 0; } //!< ARCS Constructor

/*! \brief Simple enum for red, green, blue components of a pixel
 * \sa imgIn()
 * \sa imgOut()
 */
enum RGB
 {
     R = 0, //!< Red component
     G = 1, //!< Green component
     B = 2, //!< Blue component
 };

public slots:
 void setFramerate(float f) { framerate = f; } //!< Sets the framerate of the video.
void setNumFrame(long n) { framenumber = n ; } //!< Sets the framenumber of the video.
/*! \brief Sets the input image to process.
 *
 * Be aware that the image should be in RGB-24 bits format.
 * This member takes the input image, allocates in memory, if needed, the output image.
 * Then it calls applyFilter(), then sends the output image using signal sendImage().
 * \param w image width
 * \param h image height
 * \param imgData raw image data
 */
virtual void setImage(int w,int h,char* imgData);

signals:
void sendImage(int,int,char*); //!< Sends the output image.

protected:
/*!\brief This virtual function needs to be implemented for each video filter.
 *
 * Note that for a simple use, there is no need to send output image.
 * This member will, in most of the cases, only contain code to transform one input image to one output image.
 * See also other protected members of this class to see how to obtain the data you want to process.
 * Be aware the bytes in output images are, at the beginning, at random state. So you should properly initialize your
 * output image if needed.
 */
virtual void applyFilter() = 0;
float elapsedTime() { return (float)framenumber/framerate; } //!< Returns the elapsed time since the beginning of the video stream.
float frameRate() { return framerate; } //!< Returns the frame rate of the video stream
long frameNumber() { return framenumber; } //!< Gives the frame number in the video stream
int getWidth() { return width; } //!< Gives the width of the video
int getHeight() { return height; } //!< Gives the height of the video
/*! \brief Gives the value of a coloured pixel in image obtained from input.
 *
 * \param i x coordinate
 * \param j y coordinate
 * \param k one of the RGB indices
 * \return a byte representing a monochrome pixel value
 */
char imgIn(int i, int j, RGB k) { return imageIn[(j*width+i)*3+k]; }
/*! \brief Sets the value of a coloured pixel in output image.
 *
 * \param i x coordinate
 * \param j y coordinate
 * \param k one of the RGB indices
 * \param b a byte representing a monochrome pixel value
 */
void imgOut(int i, int j, RGB k, char b) { imageOut[(j*width+i)*3+k] = b; }
/*! \brief Tests whether a point is really in the video images or not.
 *
 * \param i x coordinate
 * \param j y coordinate
 * \return <tt>true</tt> if inside image, <tt>false</tt> otherwise
 */
bool safeIndex(int i , int j) { return ((i < width)&&(i >= 0)&&(j<height)&&(j>=0)); }
// NOTE(review): memcpy is used while this header only includes <QObject>;
// it relies on a transitive <cstring> include — confirm or add the include.
void duplicateImage() { memcpy(imageOut,imageIn,previousSize); } //!< Duplicate input image inside output image.
char* getImageOut() { return imageOut; } //!< Gives the pointer on output image raw data

private:
float framerate;   //!< Framerate received through setFramerate().
long framenumber;  //!< Frame counter received through setNumFrame().
int width;         //!< Width of the current frame.
int height;        //!< Height of the current frame.
char* imageIn;     //!< Borrowed pointer to the caller's input frame.
char* imageOut;    //!< Owned output buffer (RGB24).
int previousSize;  //!< Byte size of imageOut, to detect geometry changes.

};
104 +
105 +#endif //__VIDEOFILTER_H__
1 +#include "videoglviewer.h"
2 +#include <GL/gl.h>
3 +#include <GL/glu.h>
4 +#include <iostream>
5 +#include <string.h>
6 +
7 +using namespace std;
8 +
9 +VideoGLViewer::VideoGLViewer(QObject* obj, const char* name) :
10 + QGLWidget(NULL, name)
11 +{
12 + makeCurrent();
13 + QGLFormat glfmt = context()->format();
14 + image = NULL;
15 + resize(320,200);
16 +}
17 +
18 +void VideoGLViewer::setImage(int w, int h,char* buffer)
19 +{
20 + makeCurrent();
21 +
22 + if ( (w != wth) && ( h != hht))
23 + if (image != NULL)
24 + {
25 + delete[] image;
26 + image = NULL;
27 + }
28 +
29 + if (image == NULL)
30 + image = new char[w*h*3];
31 +
32 + memcpy(image, buffer, w*h*3);
33 + wth = w;
34 + hht = h;
35 + updateGL();
36 + //paintGL();
37 +}
38 +
39 +
//! One-time GL setup (called by Qt before the first paint): white clear colour.
void VideoGLViewer::initializeGL()
{
    glClearColor(1.0,1.0,1.0,0.0);
}
44 +
45 +void VideoGLViewer::resizeGL(int w, int h)
46 +{
47 + std::cout << "Call to resize" << std::endl;
48 + glViewport(0,0,w,h);
49 + glMatrixMode(GL_PROJECTION);
50 + glLoadIdentity();
51 + gluOrtho2D(0,w,0,h);
52 +}
53 +
54 +
/*! \brief Draws the last received frame, if any.
 *
 * A negative Y pixel zoom is used so the top-down RGB buffer displays
 * upright under the bottom-up gluOrtho2D projection set in resizeGL().
 */
void VideoGLViewer::paintGL()
{
    glMatrixMode(GL_MODELVIEW);
    glClear(GL_COLOR_BUFFER_BIT);


    if (image != NULL)
    {
        // hht-0.001 keeps the raster position just inside the viewport;
        // a position exactly on the edge would be clipped and nothing drawn
        glRasterPos3f(0.0,hht-0.001,-0.5);
        glPixelZoom(1.0,-1.0);
        glDrawPixels(wth, hht, GL_RGB, GL_UNSIGNED_BYTE, image);
    }

    swapBuffers();
}
70 +
1 +#ifndef __VIDEOGLVIEWER_H__
2 +#define __VIDEOGLVIEWER_H__
3 +
4 +#include <opencv/cv.h>
5 +#include <qgl.h>
6 +#include <QString>
7 +#include <QEvent>
8 +#include <QImage>
9 +//#include <qstatusbar.h>
10 +
/*! \brief A component displaying RGB24 video frames through OpenGL.
 *
 * Frames are pushed through the setImage() slot as raw RGB24 buffers and
 * drawn with glDrawPixels() in paintGL(). setWidth()/setHeight() resize the
 * widget to the incoming video dimensions.
 *
 * NOTE(review): the previous documentation here described a point-selection
 * component (setPointNumber(), give2DPoint()...) that does not exist in this
 * class; it appeared to be pasted from another viewer and was replaced.
 *
 * \author Jean-Yves Didier
 * \date December, the 8th, 2004
 * \ingroup video
 */
class VideoGLViewer : public QGLWidget
{
    Q_OBJECT

public:
    VideoGLViewer(QObject* obj=0);//!< ARCS Constructor

public slots:
/*! \brief Receives a frame to display.
 * \param w frame width,
 * \param h frame height,
 * \param buffer raw RGB24 frame data (copied internally).
 */
void setImage(int w, int h, char* buffer);
void setHeight(int h) { hht = h; resize(width(), h);} //!< Records the frame height and resizes the widget.
void setWidth(int w) { wth = w; resize(w,height() );} //!< Records the frame width and resizes the widget.


protected:
void initializeGL(); //!< One-time GL setup.
void resizeGL(int w, int h); //!< Viewport/projection update.
void paintGL(); //!< Draws the stored frame.
void drawImage(); // NOTE(review): declared but no definition is visible here — confirm it exists or remove


private:
char* image; //!< Local copy of the last frame (RGB24).
int wth;     //!< Width of the stored frame.
int hht;     //!< Height of the stored frame.

};
58 +
59 +#endif //__VIDEOGLVIEWER_H__
1 +#include "videoviewer.h"
2 +
/*! \brief Converts the incoming RGB24 buffer into the internal QImage and repaints.
 *
 * \param w frame width,
 * \param h frame height,
 * \param buffer raw RGB24 frame data.
 */
void VideoViewer::setImage(int w,int h, char* buffer)
{
    // Qt3-style constructor: 32 bits per pixel image
    img = QImage(w,h,32);

    // per-pixel copy from packed RGB24 into the 32-bit QImage.
    // NOTE(review): setPixel() per pixel is slow for video-rate updates;
    // filling scanlines directly would be faster — confirm before changing.
    for (int j=0; j < h ; j++)
        for (int i=0; i < w; i++)
        {
            img.setPixel(i,j,qRgb(buffer[(j*w+i)*3],buffer[(j*w+i)*3+1],buffer[(j*w+i)*3+2]));
        }
// update();
    // erase=false: combined with Qt::WNoAutoErase this avoids flicker
    repaint(0,0,width(),height(),false);
}
1 +#ifndef __VIDEOVIEWER_H__
2 +#define __VIDEOVIEWER_H__
3 +
4 +#include <Q3Frame>
5 +#include <QImage>
6 +#include <QPainter>
7 +/*#include <qgl.h>
8 +#include <GL/glu.h>
9 +#include <GL/gl.h>*/
10 +
11 +class VideoViewer : public Q3Frame
12 +{
13 +Q_OBJECT
14 +
15 +public:
16 +VideoViewer(QObject* parent=0) : //QGLWidget(NULL, name) {}
17 + Q3Frame(NULL, name,Qt::WNoAutoErase ) {} //!< ARCS Constructor
18 +
19 +public slots:
20 + void setImage(int w, int h, char* buffer);
21 +void setWidth(int w) { resize(w, height()); }
22 +void setHeight(int h) { resize(width(), h); }
23 +
24 +protected:
25 +/*void resizeGL(int width, int height)
26 +{
27 + glViewport(0,width,0,height);*/
28 +
29 +
30 +void drawContents(QPainter* p) {
31 +p->drawImage(0,0,img);
32 +p->flush();
33 +}
34 +
35 +private:
36 +QImage img;
37 +};
38 +
39 +
40 +#endif //__VIDEOVIEWER_H__
1 +<application>
2 + <defines/>
3 + <libraries>
4 + <library name="../libs/libarcsffmpeg.so" />
5 + </libraries>
6 + <objects>
7 + <object classname="VideoViewer" persistent="true" id="vw" />
8 + <object classname="VideoDecoder" persistent="true" id="vd" />
9 + </objects>
10 + <sheets>
11 + <sheet id="start" >
12 + <preconnection>
13 + <init value="sample.avi" type="string" slot="setFile(QString)" object="vd" />
14 + <init value="" type="void" slot="show()" object="vw" />
15 + <init value="true" type="bool" slot="setThreaded(bool)" object="vd" />
16 + </preconnection>
17 + <connection>
18 + <wire objsource="vd" objdest="vw" signal="sendHeight(int)" slot="setHeight(int)" />
19 + <wire objsource="vd" objdest="vw" signal="sendWidth(int)" slot="setWidth(int)" />
20 + <wire objsource="vd" objdest="vw" signal="sendImage(int,int,char*)" slot="setImage(int,int,char*)" />
21 + </connection>
22 + <postconnection>
23 + <init value="" type="void" slot="initDecoder()" object="vd" />
24 + <init value="" type="void" slot="start()" object="vd" />
25 + </postconnection>
26 + </sheet>
27 + </sheets>
28 + <statemachine terminal="end" initial="start" />
29 +</application>