File kmediafactory-ffmpeg.patch of Package kmediafactory

--- cmake/modules/FindFfmpeg.cmake	2011-08-01 09:47:08.000000000 +0200
+++ cmake/modules/FindFfmpeg.cmake	2017-04-08 11:39:29.055071184 +0200
@@ -8,26 +8,28 @@
 #  FFMPEG_SWSCALE_FOUND  - FFMPEG also has SWSCALE
 #   
 
-SET( FFMPEG_HEADERS avformat.h avcodec.h avutil.h )
-SET( FFMPEG_PATH_SUFFIXES libavformat libavcodec libavutil )
+SET( FFMPEG_HEADERS avcodec.h avfilter.h avformat.h avutil.h )
+SET( FFMPEG_PATH_SUFFIXES libavcodec libavfilter libavformat libavutil )
 SET( FFMPEG_SWS_HEADERS swscale.h )
 SET( FFMPEG_SWS_PATH_SUFFIXES libswscale )
-
-SET( FFMPEG_LIBRARIES avformat avcodec avutil )
+SET( FFMPEG_LIBRARIES avcodec avfilter avformat avutil )
 SET( FFMPEG_SWS_LIBRARIES swscale )
 INCLUDE(FindPkgConfig)
 if ( PKG_CONFIG_FOUND )
-    pkg_check_modules( AVFORMAT libavformat )
     pkg_check_modules( AVCODEC libavcodec )
+    pkg_check_modules( AVFILTER libavfilter )
+    pkg_check_modules( AVFORMAT libavformat )
     pkg_check_modules( AVUTIL libavutil )
     pkg_check_modules( SWSCALE libswscale )
 endif ( PKG_CONFIG_FOUND )
 
-SET( FFMPEG_LIBRARY_DIR   ${AVFORMAT_LIBRARY_DIRS}
-                            ${AVCODEC_LIBRARY_DIRS}
+SET( FFMPEG_LIBRARY_DIR     ${AVCODEC_LIBRARY_DIRS}
+                            ${AVFILTER_LIBRARY_DIRS}
+                            ${AVFORMAT_LIBRARY_DIRS}
                             ${AVUTIL_LIBRARY_DIRS} )
-SET( FFMPEG_INCLUDE_PATHS ${AVFORMAT_INCLUDE_DIRS}
-                            ${AVCODEC_INCLUDE_DIRS}
+SET( FFMPEG_INCLUDE_PATHS   ${AVCODEC_INCLUDE_DIRS}
+                            ${AVFILTER_INCLUDE_DIRS}
+                            ${AVFORMAT_INCLUDE_DIRS}
                             ${AVUTIL_INCLUDE_DIRS} )
 
 # add in swscale if found
--- lib/videofile.h	2011-08-01 09:47:08.000000000 +0200
+++ lib/videofile.h	2017-04-08 12:28:50.423114180 +0200
@@ -25,11 +25,17 @@
 #include <inttypes.h>
 #include <QtGui/QImage>
 #include <QtCore/QString>
+extern "C" {
+#include <libavformat/avformat.h>
+#include <libavfilter/buffersink.h>
+}
 
 struct AVFormatContext;
 struct AVCodecContext;
 struct AVCodec;
 struct AVStream;
+struct AVFilterContext;
+struct AVFilterGraph;
 struct AVFrame;
 struct AVPacket;
 
@@ -63,18 +69,28 @@
     bool decodeVideoPacket();
     bool getVideoPacket();
     bool convertFrame();
-
+    void delete_filter_graph();
+    int  init_filter_graph(enum AVPixelFormat pixelformat, int width, int height);
+    int  process_filter_graph(AVPicture *destination, const AVPicture *source,
+			      enum AVPixelFormat pixelformat, int width, int height);
+
     private:
 
-    int             videoStreamId;
-    AVFormatContext *avFromatContext;
-    AVCodecContext  *avVideoCodecContext;
-    AVCodec         *avVideoCodec;
-    AVStream        *avVideoStream;
-    AVFrame         *avFrame;
-    uint8_t         *frameBuffer;
-    AVPacket        *avPacket;
-    int             numAudioStreams;
+    int                videoStreamId;
+    AVFormatContext    *avFromatContext;
+    AVCodecContext     *avVideoCodecContext;
+    AVCodec            *avVideoCodec;
+    AVStream           *avVideoStream;
+    AVFilterContext    *avBufferSinkContext;
+    AVFilterContext    *avBufferSourceContext;
+    AVFilterGraph      *avFilterGraph;
+    int                frameWidth;
+    int                frameHeight;
+    enum AVPixelFormat avPixelFormat;
+    AVFrame            *avFrame;
+    uint8_t            *frameBuffer;
+    AVPacket           *avPacket;
+    int                numAudioStreams;
 };
 
 #endif
--- lib/videofile.cpp	2011-08-01 09:47:08.000000000 +0200
+++ lib/videofile.cpp	2017-04-08 12:29:48.683115740 +0200
@@ -30,7 +30,8 @@
 
 extern "C" {
 #include <libavcodec/avcodec.h>
-#include <libavformat/avformat.h>
+#include <libavfilter/avfilter.h>
+#include <libavfilter/buffersrc.h>
 #include <libavutil/log.h>
 #include <libswscale/swscale.h>
 }
@@ -41,6 +42,12 @@
          , avVideoCodecContext(0L)
          , avVideoCodec(0L)
          , avVideoStream(0L)
+         , avBufferSinkContext(0L)
+         , avBufferSourceContext(0L)
+         , avFilterGraph(0L)
+         , frameWidth(0)
+         , frameHeight(0)
+         , avPixelFormat((AVPixelFormat) -1)
          , avFrame(0L)
          , frameBuffer(0L)
          , avPacket(0L)
@@ -58,7 +65,6 @@
     static bool init=false;
     if(!init) {
         av_register_all();
-        avcodec_init();
         avcodec_register_all();
         av_log_set_level(0);
         init=true;
@@ -66,13 +72,13 @@
 
     close();
 
-    if ( av_open_input_file(&avFromatContext, QFile::encodeName(filename).constData(), 0L, 0, 0L) != 0 ||
-         av_find_stream_info(avFromatContext) < 0) {
+    if ( avformat_open_input(&avFromatContext, QFile::encodeName(filename).constData(), NULL, NULL) != 0 ||
+         avformat_find_stream_info(avFromatContext, NULL) < 0) {
         close();
     }
     else {
         if(initialize()) {
-            avFrame = avcodec_alloc_frame();
+            avFrame = av_frame_alloc();
             return decodeVideoFrame();
         }
     }
@@ -88,7 +94,7 @@
     }
 
     if (avFromatContext) {
-        av_close_input_file(avFromatContext);
+        avformat_close_input(&avFromatContext);
         avFromatContext = 0L;
     }
 
@@ -144,7 +150,7 @@
 
     avVideoCodecContext->workaround_bugs = 1;
 
-    return avcodec_open(avVideoCodecContext, avVideoCodec)>=0;
+    return avcodec_open2(avVideoCodecContext, avVideoCodec, NULL)>=0;
 }
 
 int VideoFile::getWidth()
@@ -241,7 +247,7 @@
         return false;
     }
 
-    avcodec_get_frame_defaults(avFrame);
+    av_frame_unref(avFrame);
     
     int frameFinished;
     
@@ -278,10 +284,90 @@
     return frameDecoded;
 }
 
+void VideoFile::delete_filter_graph() {
+    if (avFilterGraph) {
+        av_frame_free(&avFrame);
+        avfilter_graph_free(&avFilterGraph);
+    }
+}
+
+int VideoFile::init_filter_graph(enum AVPixelFormat pixelFormat, int width, int height) {
+    AVFilterInOut *inputs = NULL, *outputs = NULL;
+    char args[512];
+    int res;
+
+    delete_filter_graph();
+    avFilterGraph = avfilter_graph_alloc();
+    snprintf(args, sizeof(args),
+             "buffer=video_size=%dx%d:pix_fmt=%d:time_base=1/1:pixel_aspect=0/1[in];"
+             "[in]yadif[out];"
+             "[out]buffersink",
+             width, height, pixelFormat);
+
+    res = avfilter_graph_parse2(avFilterGraph, args, &inputs, &outputs);
+    if (res < 0) {
+        return res;
+    }
+    if(inputs || outputs) {
+        return -1;
+    }
+
+    res = avfilter_graph_config(avFilterGraph, NULL);
+    if (res < 0) {
+        return res;
+    }
+
+    avBufferSourceContext = avfilter_graph_get_filter(avFilterGraph, "Parsed_buffer_0");
+    avBufferSinkContext = avfilter_graph_get_filter(avFilterGraph, "Parsed_buffersink_2");
+    if (!avBufferSourceContext || !avBufferSinkContext) {
+        return -1;
+    }
+
+    avFrame = av_frame_alloc();
+    frameWidth = width;
+    frameHeight = height;
+    avPixelFormat = pixelFormat;
+
+    return 0;
+}
+
+int VideoFile::process_filter_graph(AVPicture *pictureDestination, const AVPicture *pictureSource,
+				    enum AVPixelFormat pixelFormat, int width, int height) {
+    int res;
+
+    if (!avFilterGraph || width != frameWidth || height != frameHeight || pixelFormat != avPixelFormat) {
+        res = init_filter_graph(pixelFormat, width, height);
+        if (res < 0) {
+            return res;
+        }
+    }
+
+    memcpy(avFrame->data, pictureSource->data, sizeof(pictureSource->data));
+    memcpy(avFrame->linesize, pictureSource->linesize, sizeof(pictureSource->linesize));
+    avFrame->width = width;
+    avFrame->height = height;
+    avFrame->format = pixelFormat;
+
+    res = av_buffersrc_add_frame(avBufferSourceContext, avFrame);
+    if (res < 0) {
+        return res;
+    }
+
+    res = av_buffersink_get_frame(avBufferSinkContext, avFrame);
+    if (res < 0) {
+        return res;
+    }
+
+    av_picture_copy(pictureDestination, (const AVPicture *) avFrame, pixelFormat, width, height);
+    av_frame_unref(avFrame);
+
+    return 0;
+}
+
 QImage VideoFile::getFrame()
 {
     if (avFrame->interlaced_frame) {
-        avpicture_deinterlace((AVPicture*) avFrame, (AVPicture*) avFrame, avVideoCodecContext->pix_fmt,
+        process_filter_graph((AVPicture*) avFrame, (AVPicture*) avFrame, avVideoCodecContext->pix_fmt,
                               avVideoCodecContext->width, avVideoCodecContext->height);
     }
 
@@ -304,16 +390,16 @@
                height=getHeight();
     SwsContext *scaleContext = sws_getContext(avVideoCodecContext->width, avVideoCodecContext->height,
                                               avVideoCodecContext->pix_fmt, width, height,
-                                              PIX_FMT_RGB24, SWS_BICUBIC, 0L, 0L, 0L);
+                                              AV_PIX_FMT_RGB24, SWS_BICUBIC, 0L, 0L, 0L);
 
     if (0L == scaleContext) {
         return false;
     }
 
-    AVFrame *convertedFrame       = avcodec_alloc_frame();
-    uint8_t *convertedFrameBuffer = reinterpret_cast<uint8_t*>(av_malloc(avpicture_get_size(PIX_FMT_RGB24, width, width)));
+    AVFrame *convertedFrame       = av_frame_alloc();
+    uint8_t *convertedFrameBuffer = reinterpret_cast<uint8_t*>(av_malloc(avpicture_get_size(AV_PIX_FMT_RGB24, width, height)));
 
-    avpicture_fill((AVPicture*) convertedFrame, convertedFrameBuffer, PIX_FMT_RGB24, width, height);
+    avpicture_fill((AVPicture*) convertedFrame, convertedFrameBuffer, AV_PIX_FMT_RGB24, width, height);
     sws_scale(scaleContext, avFrame->data, avFrame->linesize, 0, avVideoCodecContext->height,
               convertedFrame->data, convertedFrame->linesize);
     sws_freeContext(scaleContext);
openSUSE Build Service is sponsored by