From: Bastiaan Jacques
Subject: [Gnash-commit] gnash ChangeLog libbase/embedVideoDecoderFfmpeg...
Date: Thu, 31 May 2007 01:52:13 +0000

CVSROOT:        /sources/gnash
Module name:    gnash
Changes by:     Bastiaan Jacques <bjacques>     07/05/31 01:52:13

Modified files:
        .              : ChangeLog 
        libbase        : embedVideoDecoderFfmpeg.cpp 
        server/asobj   : NetStreamFfmpeg.cpp 

Log message:
        Use libswscale instead of img_convert for YUV->RGB conversion,
        because img_convert is deprecated and has been removed from
        recent ffmpeg.

CVSWeb URLs:
http://cvs.savannah.gnu.org/viewcvs/gnash/ChangeLog?cvsroot=gnash&r1=1.3416&r2=1.3417
http://cvs.savannah.gnu.org/viewcvs/gnash/libbase/embedVideoDecoderFfmpeg.cpp?cvsroot=gnash&r1=1.8&r2=1.9
http://cvs.savannah.gnu.org/viewcvs/gnash/server/asobj/NetStreamFfmpeg.cpp?cvsroot=gnash&r1=1.74&r2=1.75
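
For readers not following the full diffs below, the libswscale path that replaces img_convert() amounts to the following sketch. It is a simplified paraphrase of the convertRGB24() helper added by this commit: unlike the committed code it does not cache the SwsContext in a static (it creates and frees one per call) and it leaves the source frame untouched; the <ffmpeg/...> header paths and the PIX_FMT_RGB24/SWS_FAST_BILINEAR constants are those of the ffmpeg tree this commit targets and may differ on other ffmpeg versions.

#include <ffmpeg/avcodec.h>
#include <ffmpeg/swscale.h>

// Convert a decoded frame to RGB24 with libswscale. Returns a heap
// buffer owned by the caller (release with delete []), or NULL on error.
uint8_t*
frameToRgb24(AVCodecContext* ctx, AVFrame* frame)
{
        const int width = ctx->width;
        const int height = ctx->height;

        // A scaler context fixes source/destination sizes and pixel
        // formats; the committed helper caches this in a static, here it
        // is created and freed per call for clarity.
        SwsContext* sws = sws_getContext(width, height, ctx->pix_fmt,
                                         width, height, PIX_FMT_RGB24,
                                         SWS_FAST_BILINEAR, NULL, NULL, NULL);
        if (!sws) return NULL;

        int bufsize = avpicture_get_size(PIX_FMT_RGB24, width, height);
        if (bufsize < 0) {
                sws_freeContext(sws);
                return NULL;
        }

        uint8_t* buffer = new uint8_t[bufsize];

        // Describe the destination buffer as an RGB24 picture.
        AVPicture dst;
        avpicture_fill(&dst, buffer, PIX_FMT_RGB24, width, height);

        // Convert all 'height' rows of the source frame into the RGB24
        // picture.
        sws_scale(sws, frame->data, frame->linesize, 0, height,
                  dst.data, dst.linesize);

        sws_freeContext(sws);
        return buffer;
}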

Patches:
Index: ChangeLog
===================================================================
RCS file: /sources/gnash/gnash/ChangeLog,v
retrieving revision 1.3416
retrieving revision 1.3417
diff -u -b -r1.3416 -r1.3417
--- ChangeLog   30 May 2007 17:17:59 -0000      1.3416
+++ ChangeLog   31 May 2007 01:52:12 -0000      1.3417
@@ -1,3 +1,10 @@
+2007-05-31 Bastiaan Jacques <address@hidden>
+
+       * server/asobj/NetStreamFfmpeg.cpp, 
+       libbase/embedVideoDecoderFfmpeg.cpp: Use libswscale instead of
+       img_convert for YUV->RGB conversion, because it is deprecated and
+       has been removed from recent ffmpeg.
+
 2007-05-30 Sandro Santilli <address@hidden>
 
        * configure.ac: add missing quote in martin's commit (no ChangeLog

Index: libbase/embedVideoDecoderFfmpeg.cpp
===================================================================
RCS file: /sources/gnash/gnash/libbase/embedVideoDecoderFfmpeg.cpp,v
retrieving revision 1.8
retrieving revision 1.9
diff -u -b -r1.8 -r1.9
--- libbase/embedVideoDecoderFfmpeg.cpp 28 May 2007 15:41:01 -0000      1.8
+++ libbase/embedVideoDecoderFfmpeg.cpp 31 May 2007 01:52:13 -0000      1.9
@@ -24,6 +24,8 @@
 #include <cstring>
 
 #include "embedVideoDecoderFfmpeg.h"
+#include <ffmpeg/swscale.h>
+#include <boost/scoped_array.hpp>
 
 embedVideoDecoderFfmpeg::embedVideoDecoderFfmpeg() :
        codec(NULL),
@@ -85,6 +87,59 @@
        }
 }
 
+// FIXME: This function (and a lot of other code in this file) is
+//        duplicated in NetStreamFfmpeg.
+
+/// Convert the given srcFrame to RGB24 pixel format.
+//
+/// @param srcCtx The codec context with which srcFrame is associated.
+/// @param srcFrame The source frame to convert. The data and linesize members
+///                 of srcFrame will be changed to match the conversion.
+/// @return A pointer to the newly allocated and freshly converted video data.
+///         The caller owns the pointer! It must be freed with delete [] when
+///        the frame has been processed.
+uint8_t*
+convertRGB24(AVCodecContext* srcCtx, AVFrame* srcFrame)
+{
+       static SwsContext* context = NULL;
+       int width = srcCtx->width, height = srcCtx->height;
+
+       if (!context) {
+               context = sws_getContext(width, height, srcCtx->pix_fmt,
+                                        width, height, PIX_FMT_RGB24,
+                                        SWS_FAST_BILINEAR, NULL, NULL, NULL);
+               if (!context) {
+                       return NULL;
+               }
+       }
+
+       int bufsize = avpicture_get_size(PIX_FMT_RGB24, width, height);
+       if (bufsize == -1) {
+               return NULL;
+       }
+
+       uint8_t* buffer = new uint8_t[bufsize];
+       if (!buffer) {
+               return NULL;
+       }
+
+       AVPicture picture;
+
+       avpicture_fill(&picture, buffer, PIX_FMT_RGB24, width, height);
+
+
+       int rv = sws_scale(context, srcFrame->data, srcFrame->linesize, 0, 
+                          width, picture.data, picture.linesize);
+       if (rv == -1) {
+               delete [] buffer;
+               return NULL;
+       }
+
+       srcFrame->linesize[0] = picture.linesize[0];
+       srcFrame->data[0] = picture.data[0];
+
+       return buffer;
+}
 
 // gnash calls this when it wants you to decode the given videoframe
 image::image_base*
@@ -100,7 +155,7 @@
        avcodec_decode_video(cc, frame, &got, data, size);
 
        if (got) {
-               uint8_t *buffer = NULL;
+               boost::scoped_array<uint8_t> buffer;
 
                if (outputFormat == NONE) { // NullGui?
                        av_free(frame);
@@ -111,13 +166,7 @@
                        //img_convert((AVPicture*) pFrameYUV, PIX_FMT_YUV420P, (AVPicture*) pFrame, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);
 
                } else if (outputFormat == RGB && cc->pix_fmt != PIX_FMT_RGB24) {
-                       AVFrame* frameRGB = avcodec_alloc_frame();
-                       unsigned int numBytes = avpicture_get_size(PIX_FMT_RGB24, cc->width, cc->height);
-                       buffer = new uint8_t[numBytes];
-                       avpicture_fill((AVPicture *)frameRGB, buffer, PIX_FMT_RGB24, cc->width, cc->height);
-                       img_convert((AVPicture*) frameRGB, PIX_FMT_RGB24, (AVPicture*) frame, cc->pix_fmt, cc->width, cc->height);
-                       av_free(frame);
-                       frame = frameRGB;
+                       buffer.reset(convertRGB24(cc, frame));
                }
 
                if (outputFormat == YUV) {
@@ -149,7 +198,6 @@
                                }
                        }
                }
-               delete [] buffer;
        } else {
                return decodedFrame;
        }
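
A note on the ownership contract documented above: convertRGB24() returns a raw buffer that the caller must free with delete [], and both decode paths in this patch simply hand it to a boost::scoped_array. Roughly as in the sketch below (handleDecodedFrame() is an illustrative name, not part of the patch, and convertRGB24() is assumed to be in scope):

#include <boost/scoped_array.hpp>

// Illustrative caller: wrap the converted buffer so it is released
// automatically once the decoded frame has been handed on.
void handleDecodedFrame(AVCodecContext* cc, AVFrame* frame)
{
        boost::scoped_array<uint8_t> rgb(convertRGB24(cc, frame));
        if (!rgb) {
                return; // conversion failed
        }

        // convertRGB24() repoints frame->data[0]/linesize[0] at the RGB24
        // buffer, so 'frame' can be passed on as-is; 'rgb' frees the
        // buffer when it goes out of scope.
}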

Index: server/asobj/NetStreamFfmpeg.cpp
===================================================================
RCS file: /sources/gnash/gnash/server/asobj/NetStreamFfmpeg.cpp,v
retrieving revision 1.74
retrieving revision 1.75
diff -u -b -r1.74 -r1.75
--- server/asobj/NetStreamFfmpeg.cpp    30 May 2007 12:48:21 -0000      1.74
+++ server/asobj/NetStreamFfmpeg.cpp    31 May 2007 01:52:13 -0000      1.75
@@ -17,7 +17,7 @@
 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 //
 
-/* $Id: NetStreamFfmpeg.cpp,v 1.74 2007/05/30 12:48:21 strk Exp $ */
+/* $Id: NetStreamFfmpeg.cpp,v 1.75 2007/05/31 01:52:13 bjacques Exp $ */
 
 #ifdef HAVE_CONFIG_H
 #include "config.h"
@@ -35,6 +35,8 @@
 #include "sound_handler.h"
 //#include "action.h"
 #include <boost/scoped_array.hpp>
+#include <ffmpeg/swscale.h>
+
 
 #if defined(_WIN32) || defined(WIN32)
 # include <windows.h>  // for sleep()
@@ -862,6 +864,60 @@
        return true;
 }
 
+// FIXME: This function (and a lot of other code in this file) is
+//        duplicated in embedVideoDecoderFfmpeg.
+
+/// Convert the given srcFrame to RGB24 pixel format.
+//
+/// @param srcCtx The codec context with which srcFrame is associated.
+/// @param srcFrame The source frame to convert. The data and linesize members
+///                 of srcFrame will be changed to match the conversion.
+/// @return A pointer to the newly allocated and freshly converted video data.
+///         The caller owns the pointer! It must be freed with delete [] when
+///        the frame has been processed.
+uint8_t*
+convertRGB24(AVCodecContext* srcCtx, AVFrame* srcFrame)
+{
+       static SwsContext* context = NULL;
+       int width = srcCtx->width, height = srcCtx->height;
+
+       if (!context) {
+               context = sws_getContext(width, height, srcCtx->pix_fmt,
+                                        width, height, PIX_FMT_RGB24,
+                                        SWS_FAST_BILINEAR, NULL, NULL, NULL);
+               if (!context) {
+                       return NULL;
+               }
+       }
+
+       int bufsize = avpicture_get_size(PIX_FMT_RGB24, width, height);
+       if (bufsize == -1) {
+               return NULL;
+       }
+
+       uint8_t* buffer = new uint8_t[bufsize];
+       if (!buffer) {
+               return NULL;
+       }
+
+       AVPicture picture;
+
+       avpicture_fill(&picture, buffer, PIX_FMT_RGB24, width, height);
+
+
+       int rv = sws_scale(context, srcFrame->data, srcFrame->linesize, 0, 
+                          width, picture.data, picture.linesize);
+       if (rv == -1) {
+               delete [] buffer;
+               return NULL;
+       }
+
+       srcFrame->linesize[0] = picture.linesize[0];
+       srcFrame->data[0] = picture.data[0];
+
+       return buffer;
+}
+
 bool NetStreamFfmpeg::decodeVideo(AVPacket* packet)
 {
        if (!m_VCodecCtx) return false;
@@ -887,13 +943,7 @@
                        //img_convert((AVPicture*) pFrameYUV, PIX_FMT_YUV420P, (AVPicture*) pFrame, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);
 
                } else if (m_videoFrameFormat == render::RGB && m_VCodecCtx->pix_fmt != PIX_FMT_RGB24) {
-                       AVFrame* frameRGB = avcodec_alloc_frame();
-                       unsigned int numBytes = avpicture_get_size(PIX_FMT_RGB24, m_VCodecCtx->width, m_VCodecCtx->height);
-                       buffer.reset(new uint8_t[numBytes]);
-                       avpicture_fill((AVPicture *)frameRGB, buffer.get(), PIX_FMT_RGB24, m_VCodecCtx->width, m_VCodecCtx->height);
-                       img_convert((AVPicture*) frameRGB, PIX_FMT_RGB24, (AVPicture*) m_Frame, m_VCodecCtx->pix_fmt, m_VCodecCtx->width, m_VCodecCtx->height);
-                       av_free(m_Frame);
-                       m_Frame = frameRGB;
+                       buffer.reset(convertRGB24(m_VCodecCtx, m_Frame));
                }
 
                raw_mediadata_t* video = new raw_mediadata_t;



