gnash-commit
[Top][All Lists]
Advanced

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

Re: [Gnash-commit] /srv/bzr/gnash/trunk r9924: libmedia:


From: Bastiaan Jacques
Subject: Re: [Gnash-commit] /srv/bzr/gnash/trunk r9924: libmedia:
Date: Sun, 5 Oct 2008 01:27:06 -0700 (PDT)
User-agent: Alpine 1.00 (DEB 882 2007-12-20)

Diff too large for email (2294 lines, the limit is 1000).


=== modified file 'libcore/asobj/Makefile.am'
--- libcore/asobj/Makefile.am   2008-09-29 16:02:53 +0000
+++ libcore/asobj/Makefile.am   2008-10-05 00:08:38 +0000
@@ -132,8 +132,6 @@
        Selection.h \
        SharedObject.h \
        Sound.h \
-       SoundFfmpeg.h \
-       SoundGst.h \
        Stage.h \
        System_as.h \
        TextFormat.h \
@@ -179,7 +177,6 @@
        $(LIBXML_LIBS)

 if USE_FFMPEG_ENGINE
-libgnashasobjs_la_SOURCES += SoundFfmpeg.cpp
 AM_CPPFLAGS += $(FFMPEG_CFLAGS) \
                $(SDL_CFLAGS) \
                -I$(top_srcdir)/libmedia/ffmpeg
@@ -188,7 +185,6 @@
 endif

 if USE_GST_ENGINE
-libgnashasobjs_la_SOURCES += SoundGst.cpp
 AM_CPPFLAGS += $(GSTREAMER_CFLAGS) \
                -I$(top_srcdir)/libmedia/gst
 libgnashasobjs_la_LIBADD += $(GSTREAMER_LIBS) \

=== modified file 'libcore/asobj/NetStream.cpp'
--- libcore/asobj/NetStream.cpp 2008-10-01 15:04:46 +0000
+++ libcore/asobj/NetStream.cpp 2008-10-05 00:08:38 +0000
@@ -802,6 +802,9 @@
 NetStream::~NetStream()
 {
        close(); // close will also detach from sound handler
+       if (m_parser.get()) {
+               m_parser->join();
+       }
 }



=== modified file 'libcore/asobj/Sound.cpp'
--- libcore/asobj/Sound.cpp     2008-09-29 18:35:49 +0000
+++ libcore/asobj/Sound.cpp     2008-10-05 00:08:38 +0000
@@ -33,11 +33,10 @@
 #include "Object.h" // for getObjectInterface
 #include "VM.h"

-#ifdef USE_GST
-# include "SoundGst.h"
-#elif defined(USE_FFMPEG)
-# include "SoundFfmpeg.h"
-#endif
+#include "StreamProvider.h"
+
+
+#include "Sound.h"
 #include <string>

 namespace gnash {
@@ -68,8 +67,30 @@
        soundId(-1),
        externalSound(false),
        isStreaming(false),
-       _soundHandler(get_sound_handler())
-{
+       _soundHandler(get_sound_handler()),
+       _mediaHandler(media::MediaHandler::get()),
+       _startTime(0),
+       _leftOverData(),
+       _leftOverPtr(0),
+       _leftOverSize(0),
+       isAttached(false),
+       remainingLoops(0)
+{
+}
+
+Sound::~Sound()
+{
+       //GNASH_REPORT_FUNCTION;
+
+       if (isAttached && _soundHandler)
+       {
+               _soundHandler->detach_aux_streamer(this);
+       }
+
+       if (_mediaParser)
+       {
+               _mediaParser->join();
+       }
 }

 void
@@ -88,14 +109,14 @@
 long
 Sound::getBytesLoaded()
 {
-       LOG_ONCE( log_unimpl("Sound.getBytesLoaded() [default impl]") );
+       if ( _mediaParser ) return _mediaParser->getBytesLoaded();
        return 0;
 }

 long
 Sound::getBytesTotal()
 {
-       LOG_ONCE( log_unimpl("Sound.getBytesTotal() [default impl]") );
+       if ( _mediaParser ) return _mediaParser->getBytesTotal();
        return -1;
 }

@@ -158,14 +179,75 @@
 }

 void
-Sound::loadSound(const std::string& file, bool /*streaming*/)
+Sound::loadSound(const std::string& file, bool streaming)
 {
-       log_debug(_("%s is still testing!"), __FUNCTION__);
-
-       if (connection) {
-               log_error(_("%s: This sound already has a connection?  (We try to 
handle this by overriding the old one...)"), __FUNCTION__);
-       }
-       externalURL = file;
+       if ( ! _mediaHandler || ! _soundHandler )
+       {
+               log_debug("No media or sound handlers, won't load any sound");
+               return;
+       }
+
+       /// If we are already streaming stop doing so as we'll replace
+       /// the media parser
+       if ( isAttached )
+       {
+               _soundHandler->detach_aux_streamer(this);
+               isAttached = false;
+       }
+
+       /// Delete any media parser being used (make sure we have detached!)
+       if (_mediaParser)
+       {
+               _mediaParser->join();
+       }
+       _mediaParser.reset();
+
+       /// Start at offset 0, in case a previous ::start() call
+       /// changed that.
+       _startTime=0;
+
+       URL url(file, get_base_url());
+       externalURL = url.str(); // what for ? bah!
+
+       StreamProvider& streamProvider = StreamProvider::getDefaultInstance();
+       std::auto_ptr<IOChannel> inputStream( streamProvider.getStream( 
externalURL ) );
+       if ( ! inputStream.get() )
+       {
+               log_error( _("Gnash could not open this url: %s"), url );
+               return;
+       }
+
+       externalSound = true;
+       isStreaming = streaming;
+
+       _mediaParser.reset( 
_mediaHandler->createMediaParser(inputStream).release() );
+       if ( ! _mediaParser )
+       {
+               log_error(_("Unable to create parser for Sound input"));
+               // not necessarely correct, the stream might have been found...
+               return;
+       }
+       _mediaParser->setBufferTime(60000); // one minute buffer... should be 
fine
+
+       media::AudioInfo* audioInfo =  _mediaParser->getAudioInfo();
+       if (!audioInfo) {
+               log_debug("No audio in Sound input");
+               return;
+       }
+
+    try {
+           
_audioDecoder.reset(_mediaHandler->createAudioDecoder(*audioInfo).release());
+       }
+       catch (MediaException& e) {
+        assert(!_audioDecoder.get());
+               log_error(_("Could not create audio decoder: %s"), e.what());
+       }
+
+       // start playing ASAP, a call to ::start will just change _startTime
+       //log_debug("Attaching the aux streamer");
+       _soundHandler->attach_aux_streamer(getAudioWrapper, (void*) this);
+       isAttached = true;
+
 }

 void
@@ -224,66 +306,222 @@
 void
 Sound::start(int offset, int loops)
 {
-    if ( soundId == -1 )
-    {
-        // FIXME: find out what to do here
-        log_error("Sound.start() called against a Sound that has no sound handle 
attached");
-        return;
-    }
-
-    if (_soundHandler)
-    {
-        _soundHandler->play_sound(soundId, loops, offset, 0, NULL);
-    }
+       if ( ! _soundHandler )
+       {
+               log_error("No sound handler, nothing to start...");
+               return;
+       }
+
+       if (externalSound)
+       {
+               if ( ! _mediaParser )
+               {
+                       log_error("No MediaParser initialized, can't start an 
external sound");
+                       return;
+               }
+               if ( ! _audioDecoder )
+               {
+                       log_error("No AudioDecoder initialized, can't start an 
external sound");
+                       return;
+               }
+
+               if (offset > 0)
+               {
+                       _startTime=offset*1000;
+                       boost::uint32_t seekms = boost::uint32_t(offset*1000);
+                       // TODO: boost::mutex::scoped_lock 
parserLock(_parserMutex);
+                       _mediaParser->seek(seekms); // well, we try...
+               }
+
+               // Save how many loops to do (not when streaming)
+               if (! isStreaming && loops > 0)
+               {
+                       remainingLoops = loops;
+               }
+
+               if ( ! isAttached )
+               {
+                       _soundHandler->attach_aux_streamer(getAudioWrapper, 
(void*) this);
+                       isAttached = true;
+               }
+       }
+       else
+       {
+               _soundHandler->play_sound(soundId, loops, offset, 0, NULL);
+       }
 }

 void
 Sound::stop(int si)
 {
-    if ( soundId == -1 )
-    {
-        // FIXME: find out what to do here
-        log_error("Sound.stop() called against a Sound that has no sound handle 
attached");
-        return;
-    }
+       if ( ! _soundHandler )
+       {
+               log_error("No sound handler, nothing to stop...");
+               return;
+       }

-       if (_soundHandler)
+       // stop the sound
+       if (si < 0)
        {
-           if (si > -1) {
+               if (externalSound)
+               {
+                       _soundHandler->detach_aux_streamer(this);
+               }
+               else
+               {
                        _soundHandler->stop_sound(soundId);
                }
-               else {
-                       _soundHandler->stop_sound(si);
-               }
+       }
+       else
+       {
+               _soundHandler->stop_sound(si);
        }
 }

 unsigned int
 Sound::getDuration()
 {
-    LOG_ONCE(log_error(_("%s: only works when a media handler is enabled"), 
__FUNCTION__));
+       if ( ! _soundHandler )
+       {
+               log_error("No sound handler, can't check duration...");
+               return 0;
+       }
+
+       // If this is a event sound get the info from the soundhandler
+       if (!externalSound)
+       {
+               return _soundHandler->get_duration(soundId);
+       }
+
+       // If we have a media parser (we'd do for an externalSound)
+       // try fetching duration from it
+       if ( _mediaParser )
+       {
+               media::AudioInfo* info = _mediaParser->getAudioInfo();
+               if ( info )
+               {
+                       return info->duration;
+               }
+       }
+
        return 0;
 }

 unsigned int
 Sound::getPosition()
 {
-    LOG_ONCE(log_error(_("%s: only works when a media handler is enabled"), 
__FUNCTION__));
+       if ( ! _soundHandler )
+       {
+               log_error("No sound handler, can't check position (we're likely not 
playing anyway)...");
+               return 0;
+       }
+
+       // If this is a event sound get the info from the soundhandler
+       if (!externalSound)
+       {
+               return _soundHandler->tell(soundId);
+       }
+
+       if ( _mediaParser )
+       {
+               boost::uint64_t ts;
+               if ( _mediaParser->nextAudioFrameTimestamp(ts) )
+               {
+                       return ts;
+               }
+       }
+
        return 0;
-}
+
+}
+
+
+
+
+
+
+
+
+bool
+Sound::getAudio(boost::uint8_t* stream, int len)
+{
+       //GNASH_REPORT_FUNCTION;
+
+       while (len > 0)
+       {
+               if ( ! _leftOverData )
+               {
+                       bool parsingComplete = 
_mediaParser->parsingCompleted(); // check *before* calling nextAudioFrame
+                       std::auto_ptr<media::EncodedAudioFrame> frame = 
_mediaParser->nextAudioFrame();
+                       if ( ! frame.get() )
+                       {
+                               // just wait some more if parsing isn't 
complete yet
+                               if ( ! parsingComplete )
+                               {
+                                       //log_debug("Parsing not complete and no 
more audio frames in input, try again later");
+                                       break;
+                               }
+
+                               // or detach and stop here...
+                               // (should really honour loopings if any)
+                               //if ( remainingLoops.. )
+                               //log_debug("Parsing complete and no more audio 
frames in input, detaching");
+                               return false; // will detach us (we should 
change isAttached, but need thread safety!)
+                       }
+
+                       // if we've been asked to start at a specific time, skip
+                       // any frame with earlier timestamp
+                       if ( frame->timestamp < _startTime ) continue;
+
+                       _leftOverData.reset( _audioDecoder->decode(*frame, 
_leftOverSize) );
+                       _leftOverPtr = _leftOverData.get();
+                       if ( ! _leftOverData )
+                       {
+                               log_error("No samples decoded from input of %d 
bytes", frame->dataSize);
+                               continue;
+                       }
+               }
+
+               int n = std::min<int>(_leftOverSize, len);
+               memcpy(stream, _leftOverPtr, n);
+               stream += n;
+               _leftOverPtr += n;
+               _leftOverSize -= n;
+               len -= n;
+
+               if (_leftOverSize == 0)
+               {
+                       _leftOverData.reset();
+                       _leftOverPtr = 0;
+               }
+
+       }
+
+       // drop any queued video frame
+       while (_mediaParser->nextVideoFrame().get()) {};
+
+       return true;
+}
+
+// audio callback is running in sound handler thread
+bool
+Sound::getAudioWrapper(void* owner, boost::uint8_t* stream, int len)
+{
+       Sound* so = static_cast<Sound*>(owner);
+       return so->getAudio(stream, len);
+}
+
+
+
+
+
+


 as_value
 sound_new(const fn_call& fn)
 {
-       Sound* sound_obj;
-#ifdef USE_GST
-       sound_obj = new SoundGst();
-#elif defined(USE_FFMPEG)
-       sound_obj = new SoundFfmpeg();
-#else
-       sound_obj = new Sound();
-#endif
+       Sound* sound_obj = new Sound();

     if ( fn.nargs )
     {

=== modified file 'libcore/asobj/Sound.h'
--- libcore/asobj/Sound.h       2008-08-27 11:07:07 +0000
+++ libcore/asobj/Sound.h       2008-10-05 00:08:38 +0000
@@ -32,6 +32,10 @@
 #include "as_object.h" // for inheritance
 #include "NetConnection.h"

+#include "MediaHandler.h"
+#include "MediaParser.h"
+#include "AudioDecoder.h"
+
 #include <boost/scoped_ptr.hpp>

 /// Forward declarations
@@ -58,20 +62,20 @@
        ///
        void attachCharacter(character* attachedChar);

-       virtual ~Sound() {}
-       virtual void attachSound(int si, const std::string& name);
+       ~Sound();
+       void attachSound(int si, const std::string& name);

        /// Get number of bytes loaded from the external sound (if any)
-       virtual long getBytesLoaded();
+       long getBytesLoaded();

        /// Get total number of bytes in the external sound being loaded
        //
        /// @return -1 if unknown
        ///
-       virtual long getBytesTotal();
+       long getBytesTotal();

-       virtual void getPan();
-       virtual void getTransform();
+       void getPan();
+       void getTransform();

        /// Get volume from associated resource
        //
@@ -82,17 +86,17 @@
        bool getVolume(int& volume);
        void setVolume(int volume);

-       virtual void loadSound(const std::string& file, bool streaming);
-       virtual void setPan();
-       virtual void setTransform();
-       virtual void start(int offset, int loops);
-       virtual void stop(int si);
-       virtual unsigned int getDuration();
-       virtual unsigned int getPosition();
-
-       std::string soundName;
-
-protected:
+       void loadSound(const std::string& file, bool streaming);
+       void setPan();
+       void setTransform();
+       void start(int offset, int loops);
+       void stop(int si);
+       unsigned int getDuration();
+       unsigned int getPosition();
+
+       std::string soundName;
+
+private:

 #ifdef GNASH_USE_GC
        /// Mark all reachable resources of a Sound, for the GC
@@ -120,6 +124,27 @@
        bool isStreaming;

        media::sound_handler* _soundHandler;
+       media::MediaHandler* _mediaHandler;
+       boost::scoped_ptr<media::MediaParser> _mediaParser;
+       boost::scoped_ptr<media::AudioDecoder> _audioDecoder;
+
+       /// Number of milliseconds into the sound to start it
+       //
+       /// This is set by start()
+       boost::uint64_t _startTime;
+
+       boost::scoped_array<boost::uint8_t> _leftOverData;
+       boost::uint8_t* _leftOverPtr;
+       boost::uint32_t _leftOverSize;
+
+       static bool getAudioWrapper(void *owner, boost::uint8_t *stream, int 
len);
+
+       bool getAudio(boost::uint8_t *stream, int len);
+
+       // Are this sound attached to the soundhandler?
+       bool isAttached;
+
+       int remainingLoops;

 };


=== modified file 'libmedia/Makefile.am'
--- libmedia/Makefile.am        2008-09-29 16:02:53 +0000
+++ libmedia/Makefile.am        2008-10-05 00:08:38 +0000
@@ -99,11 +99,10 @@
                gst/MediaHandlerGst.cpp \
                gst/VideoDecoderGst.cpp \
                gst/AudioDecoderGst.cpp \
+               gst/MediaParserGst.cpp \
                gst/gstappsink.c \
                gst/gstappsrc.c \
-               gst/SoundHandlerGst.cpp \
                gst/GstUtil.cpp \
-               gst/SoundGst.cpp \
                gst/gstflvdemux.c \
                gst/gstflvparse.c \
                gst/gstbuffersrc.c \
@@ -114,11 +113,10 @@
                gst/MediaHandlerGst.h \
                gst/AudioDecoderGst.h \
                gst/VideoDecoderGst.h \
+               gst/MediaParserGst.h \
                gst/gstappsink.h \
                gst/gstappsrc.h \
-               gst/SoundHandlerGst.h \
                gst/GstUtil.h \
-               gst/SoundGst.h \
                gst/gstflvdemux.h \
                gst/gstflvparse.h \
                gst/gstbuffersrc.h \

=== modified file 'libmedia/MediaParser.cpp'
--- libmedia/MediaParser.cpp    2008-10-03 15:49:46 +0000
+++ libmedia/MediaParser.cpp    2008-10-05 00:08:38 +0000
@@ -202,13 +202,20 @@
        return _audioFrames.front();
 }

-MediaParser::~MediaParser()
+void
+MediaParser::join()
 {
        if ( _parserThread.get() )
        {
                requestParserThreadKill();
                _parserThread->join();
+               _parserThread.reset();
        }
+}
+
+MediaParser::~MediaParser()
+{
+       assert (! _parserThread.get() );

        for (VideoFrames::iterator i=_videoFrames.begin(),
                e=_videoFrames.end(); i!=e; ++i)
@@ -331,7 +338,7 @@
        bool pc=parsingCompleted();
        bool ic=indexingCompleted();
        bool bf=bufferFull();
-       if ( pc || (bf && ic) ) // TODO: or seekRequested ?
+       if (( pc || (bf && ic)) && !parserThreadKillRequested()) // TODO: or 
seekRequested ?
        {
 #ifdef GNASH_DEBUG_MEDIAPARSER
                log_debug("Parser thread waiting on wakeup lock, parsingComplete=%d, 
bufferFull=%d", pc, bf);

=== modified file 'libmedia/MediaParser.h'
--- libmedia/MediaParser.h      2008-10-01 14:00:35 +0000
+++ libmedia/MediaParser.h      2008-10-05 00:08:38 +0000
@@ -192,6 +192,14 @@

 DSOEXPORT std::ostream& operator << (std::ostream& os, const VideoInfo& vi);

+
+class EncodedExtraData {
+
+public:
+       virtual ~EncodedExtraData() {}
+
+};
+
 /// An encoded video frame
 class EncodedVideoFrame
 {
@@ -236,6 +244,8 @@
        /// Return video frame number
        unsigned frameNum() const { return _frameNum; }

+       // FIXME: should have better encapsulation for this sort of stuff.
+       std::auto_ptr<EncodedExtraData> extradata;
 private:

        boost::uint32_t _size;
@@ -251,6 +261,9 @@
        boost::uint32_t dataSize;
        boost::scoped_array<boost::uint8_t> data;
        boost::uint64_t timestamp;
+
+       // FIXME: should have better encapsulation for this sort of stuff.
+       std::auto_ptr<EncodedExtraData> extradata;
 };

 /// The MediaParser class provides cursor-based access to encoded media frames
@@ -274,6 +287,8 @@
        //
        virtual ~MediaParser();

+       void join();
+
        /// \brief
        /// Seeks to the closest possible position the given position,
        /// and returns the new position.
@@ -404,6 +419,22 @@

 protected:

+       /// Subclasses *must* set the following variables: @{
+
+       /// Info about the video stream (if any)
+       std::auto_ptr<VideoInfo> _videoInfo;
+
+       /// Info about the audio stream (if any)
+       std::auto_ptr<AudioInfo> _audioInfo;
+
+       /// Whether the parsing is complete or not
+       bool _parsingComplete;
+
+       /// Number of bytes loaded
+       boost::uint64_t _bytesLoaded;
+
+       /// }@
+
        /// Start the parser thread
        void startParserThread();

@@ -428,19 +459,10 @@
        ///
        const EncodedAudioFrame* peekNextAudioFrame() const;

-       /// Info about the video stream (if any)
-       std::auto_ptr<VideoInfo> _videoInfo;
-
-       /// Info about the audio stream (if any)
-       std::auto_ptr<AudioInfo> _audioInfo;
-
        /// The stream used to access the file
        std::auto_ptr<IOChannel> _stream;
        mutable boost::mutex _streamMutex;

-       /// Whether the parsing is complete or not
-       bool _parsingComplete;
-
        static void parserLoopStarter(MediaParser* mp)
        {
                mp->parserLoop();
@@ -471,11 +493,6 @@
        bool _parserThreadKillRequested;
        boost::condition _parserThreadWakeup;

-       /// On seek, this flag will be set, while holding a lock on 
_streamMutex.
-       /// The parser, when obtained a lock on _streamMutex, will check this
-       /// flag, if found to be true will clear the buffers and reset to false.
-       bool _seekRequest;
-
        /// Wait on the _parserThreadWakeup condition if buffer is full
        /// or parsing was completed.
        ///
@@ -488,8 +505,6 @@
        /// mutex protecting access to the a/v encoded frames queues
        mutable boost::mutex _qMutex;

-       /// Number of bytes loaded
-       boost::uint64_t _bytesLoaded;

        /// Mutex protecting _bytesLoaded (read by main, set by parser)
        mutable boost::mutex _bytesLoadedMutex;
@@ -503,6 +518,10 @@
        ///
        bool bufferFull() const;

+       /// On seek, this flag will be set, while holding a lock on 
_streamMutex.
+       /// The parser, when obtained a lock on _streamMutex, will check this
+       /// flag, if found to be true will clear the buffers and reset to false.
+       bool _seekRequest;
 private:

        typedef std::deque<EncodedVideoFrame*> VideoFrames;

=== modified file 'libmedia/gst/AudioDecoderGst.cpp'
--- libmedia/gst/AudioDecoderGst.cpp    2008-09-29 19:33:38 +0000
+++ libmedia/gst/AudioDecoderGst.cpp    2008-10-05 00:08:38 +0000
@@ -24,72 +24,89 @@

 #include "AudioDecoderGst.h"
 #include "MediaParser.h"
+#include "MediaParserGst.h"

 namespace gnash {
 namespace media {

+
+
 AudioDecoderGst::AudioDecoderGst(SoundInfo& info)
-    :
-        _samplerate(info.getSampleRate()),
-        _stereo(info.isStereo())
 {
-    setup(info);
+    GstCaps* srccaps = gst_caps_new_simple ("audio/mpeg",
+               "mpegversion", G_TYPE_INT, 1,
+               "layer", G_TYPE_INT, 3,
+               "rate", G_TYPE_INT, info.getSampleRate(),
+               "channels", G_TYPE_INT, info.isStereo() ? 2 : 1, NULL);
+
+    setup(srccaps);
+
+    // FIXME: should we handle other types?
 }

 AudioDecoderGst::AudioDecoderGst(AudioInfo& info)
-    :
-        _samplerate(info.sampleRate),
-        _stereo(info.stereo)
 {
-    setup(info);
+    GstCaps* srccaps;
+
+    if (info.type == FLASH && info.codec == AUDIO_CODEC_MP3)
+    {
+        srccaps = gst_caps_new_simple ("audio/mpeg",
+               "mpegversion", G_TYPE_INT, 1,
+               "layer", G_TYPE_INT, 3,
+               "rate", G_TYPE_INT, info.sampleRate,
+               "channels", G_TYPE_INT, info.stereo ? 2 : 1, NULL);
+        setup(srccaps);
+        return;
+    }
+    }
+
+    if (info.type == FLASH && info.codec == AUDIO_CODEC_NELLYMOSER)
+    {
+        srccaps = gst_caps_new_simple ("audio/x-nellymoser",
+               "rate", G_TYPE_INT, info.sampleRate,
+               "channels", G_TYPE_INT, info.stereo ? 2 : 1, NULL);
+        setup(srccaps);
+        return;
+    }
+
+    if (info.type == FLASH) {
+        throw MediaException("AudioDecoderGst: cannot handle this codec!");
+    }
+
+    ExtraInfoGst* extraaudioinfo = 
dynamic_cast<ExtraInfoGst*>(info.extra.get());
+
+    if (!extraaudioinfo) {
+        throw MediaException("AudioDecoderGst: cannot handle this codec!");
+    }
+
+    setup(extraaudioinfo->caps);
 }

-
 AudioDecoderGst::~AudioDecoderGst()
 {
+    assert(g_queue_is_empty (_decoder.queue));
     swfdec_gst_decoder_push_eos(&_decoder);
     swfdec_gst_decoder_finish(&_decoder);
 }

-void AudioDecoderGst::setup(AudioInfo& info)
-{
-       if (info.type != FLASH || info.codec != AUDIO_CODEC_MP3)
-       {
-           throw MediaException("AudioDecoderGst: cannot handle this codec!");
-       }
-
-       setup();
-}
-
-void AudioDecoderGst::setup(SoundInfo& info)
-{
-       setup();
-}
-
-
-void AudioDecoderGst::setup()
+
+
+void AudioDecoderGst::setup(GstCaps* srccaps)
 {
     // init GStreamer
     gst_init (NULL, NULL);

-    GstCaps* sinkcaps;
-
-    GstCaps* srccaps = gst_caps_new_simple ("audio/mpeg",
-               "mpegversion", G_TYPE_INT, 1,
-               "layer", G_TYPE_INT, 3,
-               "rate", G_TYPE_INT, _samplerate,
-               "channels", G_TYPE_INT, _stereo ? 2 : 1, NULL);
     if (!srccaps) {
         throw MediaException(_("AudioDecoderGst: internal error (caps creation 
failed)"));
     }

-    sinkcaps = gst_caps_from_string ("audio/x-raw-int, endianness=byte_order, 
signed=(boolean)true, width=16, depth=16, rate=44100, channels=2");
+    GstCaps* sinkcaps = gst_caps_from_string ("audio/x-raw-int, 
endianness=byte_order, signed=(boolean)true, width=16, depth=16, rate=44100, 
channels=2");
     if (!sinkcaps) {
         throw MediaException(_("AudioDecoderGst: internal error (caps creation 
failed)"));
     }

     // TODO: we may want to prefer other modules over audioresample, like 
ffaudioresample, if they are
     // available.
+
     bool rv = swfdec_gst_decoder_init (&_decoder, srccaps, sinkcaps, "audioconvert", 
"audioresample", NULL);
     if (!rv) {
         throw MediaException(_("AudioDecoderGst: initialisation failed."));
@@ -99,6 +116,48 @@
     gst_caps_unref (sinkcaps);
 }

+void
+buf_add(gpointer buf, gpointer data)
+{
+    boost::uint32_t* total = (boost::uint32_t*) data;
+
+    GstBuffer* buffer = (GstBuffer*) buf;
+    *total += GST_BUFFER_SIZE(buffer);
+}
+
+
+boost::uint8_t*
+AudioDecoderGst::pullBuffers(boost::uint32_t& outputSize)
+{
+    outputSize = 0;
+
+    g_queue_foreach(_decoder.queue, buf_add, &outputSize);
+
+    if (!outputSize) {
+        log_debug(_("Pushed data, but there's nothing to pull (yet)"));
+        return 0;
+    }
+
+    boost::uint8_t* rbuf = new boost::uint8_t[outputSize];
+
+    boost::uint8_t* ptr = rbuf;
+
+    while (true) {
+
+        GstBuffer* buffer = swfdec_gst_decoder_pull (&_decoder);
+        if (!buffer) {
+            break;
+        }
+
+        memcpy(ptr, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
+        ptr += GST_BUFFER_SIZE(buffer);
+
+        gst_buffer_unref (buffer);
+    }
+
+    return rbuf;
+}
+
 boost::uint8_t*
 AudioDecoderGst::decode(boost::uint8_t* input, boost::uint32_t inputSize,
                         boost::uint32_t& outputSize,
@@ -112,41 +171,39 @@
     bool success = swfdec_gst_decoder_push(&_decoder, gstbuf);
     if (!success) {
         log_error(_("AudioDecoderGst: buffer push failed."));
-        return 0;
-    }
-
-    decodedData = inputSize;
-
-    GstBuffer * decodedbuf = swfdec_gst_decoder_pull (&_decoder);
-
-    if (!decodedbuf) {
-        outputSize = 0;
-        return 0;
-    }
-
-    outputSize = GST_BUFFER_SIZE(decodedbuf);
-    decodedData = inputSize;
-
-    boost::uint8_t* rbuf = new boost::uint8_t[outputSize];
-    memcpy(rbuf, GST_BUFFER_DATA(decodedbuf), outputSize);
-    gst_buffer_unref(decodedbuf);
-
-    return rbuf;
+        gst_buffer_unref(gstbuf);
+        return 0;
+    }
+
+    decodedData = inputSize;
+
+    return pullBuffers(outputSize);
 }

 boost::uint8_t*
 AudioDecoderGst::decode(const EncodedAudioFrame& ef, boost::uint32_t& 
outputSize)
 {
-    // Docs are not very helpful as to what the difference between these two is.
-    boost::uint32_t output_size = 0;
-    boost::uint32_t decoded_data_size = 0;
-
-    uint8_t* rv = decode(ef.data.get(), ef.dataSize, output_size, 
decoded_data_size, false);
-
-    // my best guess is that outputSize in one method means outputSize in the 
other...
-    outputSize = output_size;
-
-    return rv;
+    outputSize = 0;
+
+    GstBuffer* gstbuf;
+
+    EncodedExtraGstData* extradata = dynamic_cast<EncodedExtraGstData*>(ef.extradata.get());
+
+    if (extradata) {
+        gstbuf = extradata->buffer;
+    } else {
+
+        gstbuf = gst_buffer_new_and_alloc(ef.dataSize);
+        memcpy (GST_BUFFER_DATA (gstbuf), ef.data.get(), ef.dataSize);
+    }
+
+    bool success = swfdec_gst_decoder_push(&_decoder, gstbuf);
+    if (!success) {
+        log_error(_("AudioDecoderGst: buffer push failed."));
+        return 0;
+    }
+
+    return pullBuffers(outputSize);
 }



=== modified file 'libmedia/gst/AudioDecoderGst.h'
--- libmedia/gst/AudioDecoderGst.h      2008-09-29 16:02:53 +0000
+++ libmedia/gst/AudioDecoderGst.h      2008-10-05 00:08:38 +0000
@@ -51,13 +51,11 @@

 private:

-    void setup(AudioInfo& info);
-    void setup(SoundInfo& info);
-    void setup();
+    boost::uint8_t* pullBuffers(boost::uint32_t&  outputSize);
+
+    void setup(GstCaps* caps);

     SwfdecGstDecoder _decoder;
-    boost::uint32_t _samplerate;
-    bool _stereo;

 };


=== modified file 'libmedia/gst/MediaHandlerGst.cpp'
--- libmedia/gst/MediaHandlerGst.cpp    2008-09-23 17:14:12 +0000
+++ libmedia/gst/MediaHandlerGst.cpp    2008-10-05 00:08:38 +0000
@@ -21,18 +21,42 @@
 #include "MediaHandlerGst.h"
 #include "VideoDecoderGst.h"
 #include "AudioDecoderGst.h"
+#include "MediaParserGst.h"
+#include "FLVParser.h"

 #include "IOChannel.h" // for visibility of destructor
 #include "MediaParser.h" // for visibility of destructor

+#include "MediaParserGst.h"
+
+
 namespace gnash {
 namespace media {

 std::auto_ptr<MediaParser>
 MediaHandlerGst::createMediaParser(std::auto_ptr<IOChannel> stream)
 {
-       // TODO: support more then just FLV...
-       return MediaHandler::createMediaParser(stream);
+       std::auto_ptr<MediaParser> parser;
+
+       if ( isFLV(*stream) )
+       {
+               parser.reset( new FLVParser(stream) );
+       }
+       else
+       {
+               try
+               {
+                       parser.reset(new MediaParserGst(stream));
+               }
+               catch (GnashException& ex)
+               {
+                       log_error("Could not create Gstreamer based media parser for 
"
+                    "input stream: %s", ex.what());
+                       assert(!parser.get());
+               }
+       }
+
+       return parser;
 }

 std::auto_ptr<VideoDecoder>
@@ -40,8 +64,12 @@
 {
        if ( info.type != FLASH )
        {
-               log_error("Non-flash video encoding not supported yet by GST 
VideoDecoder");
-               return std::auto_ptr<VideoDecoder>(0);
+               ExtraInfoGst* extrainfo = 
dynamic_cast<ExtraInfoGst*>(info.extra.get());
+               if (!extrainfo) {
+                       log_error(_("Wrong arguments given to GST 
VideoDecoder"));
+                       return std::auto_ptr<VideoDecoder>(0);
+               }
+               return std::auto_ptr<VideoDecoder>(new 
VideoDecoderGst(extrainfo->caps));
        }
        videoCodecType format = static_cast<videoCodecType>(info.codec);
        int width = info.width;

=== added file 'libmedia/gst/MediaParserGst.cpp'
--- libmedia/gst/MediaParserGst.cpp     1970-01-01 00:00:00 +0000
+++ libmedia/gst/MediaParserGst.cpp     2008-10-05 00:08:38 +0000
@@ -0,0 +1,538 @@
+// MediaParserGst.cpp: GStreamer media parsers, for Gnash
+//
+//   Copyright (C) 2007, 2008 Free Software Foundation, Inc.
+//
+// This program is free software; you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation; either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+//
+
+
+#include "MediaParserGst.h"
+#include "GnashException.h"
+#include "log.h"
+#include "IOChannel.h"
+
+
+
+#include "swfdec_codec_gst.h"
+#include <iostream>
+#include <fstream>
+#include <unistd.h>
+
+#define PUSHBUF_SIZE 1024
+#define MIN_PROBE_SIZE (PUSHBUF_SIZE * 3)
+
+#define GNASH_DEBUG_DATAFLOW
+
+
+
+namespace gnash {
+namespace media {
+
+
+
+
+
+
+MediaParserGst::MediaParserGst(std::auto_ptr<IOChannel> stream)
+    : MediaParser(stream),
+      _bin(NULL),
+      _srcpad(NULL),
+      _audiosink(NULL),
+      _videosink(NULL),
+      _demux_probe_ended(false)
+{
+    gst_init (NULL, NULL);
+
+    _bin = gst_bin_new ("NULL");
+    if (!_bin) {
+        throw GnashException(_("MediaParserGst couldn't create a bin"));
+    }
+
+    GstElement* typefind = gst_element_factory_make("typefind", NULL);
+    if (!typefind) {
+        throw GnashException(_("MediaParserGst couldn't create a typefind 
element."));
+    }
+ + gst_bin_add(GST_BIN(_bin), typefind); + + g_signal_connect (typefind, "have-type", G_CALLBACK (MediaParserGst::cb_typefound), this); + + GstCaps* srccaps = gst_caps_new_any();
+    _srcpad = swfdec_gst_connect_srcpad (typefind, srccaps);
+    gst_caps_unref(srccaps);
+
+    if (!gst_element_set_state (_bin, GST_STATE_PLAYING) == 
GST_STATE_CHANGE_SUCCESS) {
+        throw GnashException(_("MediaParserGst could not change element 
state"));
+    }
+ + SimpleTimer timer;
+
+    size_t counter = 0;
+    while (!probingConditionsMet(timer)) {
+
+        if (!pushGstBuffer()) {
+            ++counter;
+        }
+    }
+
+    log_debug(_("Needed %d dead iterations to detect audio type."), counter);
+ + if (! (_videoInfo.get() || _audioInfo.get()) ) { + throw MediaException(_("MediaParserGst failed to detect any stream types.")); + } + + if (!gst_element_set_state (_bin, GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS) {
+        throw MediaException(_("MediaParserGst could not change element 
state"));
+    }
+
+    // FIXME: threading decisions should not be up to the parser!
+    startParserThread();
+}
+
+MediaParserGst::~MediaParserGst()
+{
+    if (_bin) {
+        gst_element_set_state (_bin, GST_STATE_NULL);
+        g_object_unref (_bin);
+ } +
+    if (_srcpad) {
+        g_object_unref (_srcpad);
+    }
+
+    if (_videosink) {
+        g_object_unref (_videosink);
+    }
+ + if (_audiosink) {
+        g_object_unref (_audiosink);
+    }
+
+    // Sanity check for threading bug...
+    assert(_enc_video_frames.empty());
+    assert(_enc_audio_frames.empty());
+}
+
+bool
+MediaParserGst::seek(boost::uint32_t&)
+{
+    LOG_ONCE(log_unimpl("MediaParserGst::seek()"))
+
+    return false;
+}
+
+
+bool
+MediaParserGst::parseNextChunk()
+{
+    boost::mutex::scoped_lock streamLock(_streamMutex);
+ + emitEncodedFrames();
+
+    // FIXME: do we need to check this here?
+    if (_stream->eof()) {
+        log_debug (_("Stream EOF, emitting!"));
+        _parsingComplete = true;
+        return false;
+    }
+
+    pushGstBuffer();
+
+    emitEncodedFrames();
+
+    return true;
+
+}
+
+boost::uint64_t
+MediaParserGst::getBytesLoaded() const
+{
+    boost::mutex::scoped_lock streamLock(_streamMutex);
+
+    return _stream->tell();
+}
+
+bool
+MediaParserGst::pushGstBuffer()
+{
+    GstBuffer* buffer = gst_buffer_new_and_alloc(PUSHBUF_SIZE);
+
+    size_t ret = _stream->read(GST_BUFFER_DATA(buffer), PUSHBUF_SIZE);
+
+    if (ret == 0) {
+        if (!_stream->eof()) {
+            log_error(_("MediaParserGst failed to read the stream, but did not 
"
+                      "reach EOF!"));
+        } else {
+            _parsingComplete = true;
+        }
+        gst_buffer_unref(buffer);
+        return false;
+    }
+
+    if (ret < PUSHBUF_SIZE) {
+        if (!_stream->eof()) {
+            log_error(_("MediaParserGst failed to read the stream, but did not 
"
+                      "reach EOF!"));
+        } else {
+            _parsingComplete = true;
+ } +
+        GST_BUFFER_SIZE(buffer) = ret;
+    }
+
+    GstFlowReturn rv = gst_pad_push (_srcpad, buffer);
+    if (!GST_FLOW_IS_SUCCESS (rv)) {
+        log_error(_("MediaParserGst failed to push more data into the demuxer! 
"
+                    "Seeking back."));
+        _stream->seek(_stream->tell()-ret);
+        return false;
+    }
+ + return true;
+}
+
+void
+MediaParserGst::emitEncodedFrames()
+{
+    while (!_enc_audio_frames.empty()) {
+        EncodedAudioFrame* frame = _enc_audio_frames.front();
+        pushEncodedAudioFrame(std::auto_ptr<EncodedAudioFrame>(frame));
+       _enc_audio_frames.pop_front();
+    }
+ + while (!_enc_video_frames.empty()) {
+        EncodedVideoFrame* frame = _enc_video_frames.front();
+        pushEncodedVideoFrame(std::auto_ptr<EncodedVideoFrame>(frame));
+       _enc_video_frames.pop_front();
+    }
+}
+
+void
+MediaParserGst::rememberAudioFrame(EncodedAudioFrame* frame)
+{
+    _enc_audio_frames.push_back(frame);
+}
+
+void
+MediaParserGst::rememberVideoFrame(EncodedVideoFrame* frame)
+{
+    _enc_video_frames.push_back(frame);
+}
+
+/// Determines whether all multimedia streams have been found.
+//
+///   This can happen when
+///   the stream has a nondemuxable format, like MP3, or when the linked
+///   demuxer has signaled "no more pads", or when the first video and
+///   audio streams have been found.
+
+bool MediaParserGst::foundAllStreams()
+{
+    return _demux_probe_ended || (_videoInfo.get() && _audioInfo.get());
+}
+
+/// The idea here is that probingConditionsMet will return false, unless:
+/// a) all data types in the stream were found.
+/// b) The timer (currently for 1 second) has expired, if and only if we
+///    succeeded in pushing MIN_PROBE_SIZE bytes into the bin. This should
+///    protect low-bandwidth cases from stopping the probe early.
+
+bool MediaParserGst::probingConditionsMet(const SimpleTimer& timer)
+{
+    return foundAllStreams() || (timer.expired() && getBytesLoaded() > 
MIN_PROBE_SIZE);
+}
+
+void print_caps(GstCaps* caps)
+{
+    if (!caps) {
+        return;
+    }
+
+    gchar* capsstr = gst_caps_to_string (caps);
+ + if (!capsstr) {
+        return;
+    }
+ + log_debug (_("MediaParserGst/typefound: Detected media type %s"), capsstr);
+
+    g_free(capsstr);
+}
+
+
+void
+MediaParserGst::link_to_fakesink(GstPad* pad)
+{
+    GstElement* fakesink = gst_element_factory_make("fakesink", NULL);
+ + if (!fakesink) {
+        throw MediaException(_("MediaParserGst Failed to create fakesink."));
+    }
+
+    gboolean success = gst_bin_add(GST_BIN(_bin), fakesink);
+ + if (!success) {
+        gst_object_unref(fakesink);
+        throw MediaException(_("MediaParserGst Failed to create fakesink."));
+    }
+ + GstPad* sinkpad = gst_element_get_static_pad (fakesink, "sink");
+    if (!sinkpad) {
+        gst_object_unref(fakesink);
+        throw MediaException(_("MediaParserGst: couldn't get the fakesink "
+                               "src element."));
+    }
+ + GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);
+    if (!GST_PAD_LINK_SUCCESSFUL(ret)) {
+        gst_object_unref(fakesink);
+        gst_object_unref(sinkpad);
+        throw MediaException(_("MediaParserGst: couln't link fakesink"));
+    }
+ + if (!gst_element_set_state (_bin, GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS) {
+        throw GnashException(_("MediaParserGst could not change element 
state"));
+    }
+}
+
+
+// static
+void
+MediaParserGst::cb_typefound (GstElement* typefind, guint /* probability */,
+                              GstCaps* caps, gpointer data)
+{
+    print_caps(caps);
+
+    MediaParserGst* parser = static_cast<MediaParserGst*>(data);
+
+    GstElementFactory* demuxfactory = swfdec_gst_get_demuxer_factory (caps);
+ + if (!demuxfactory) { + + GstPad* srcpad = gst_element_get_static_pad (typefind, "src");
+        if (!srcpad) {
+            throw MediaException(_("MediaParserGst: couldn't get the typefind "
+                                   "src element."));
+        }
+
+        cb_pad_added(typefind, srcpad, parser);
+
+        gst_object_unref(GST_OBJECT(srcpad));
+ + parser->_demux_probe_ended = true;
+
+    } else {
+
+        GstElement* demuxer = gst_element_factory_create (demuxfactory, 
"demuxer");
+ + gst_object_unref(GST_OBJECT(demuxfactory));
+
+        if (!demuxer) {
+            throw MediaException(_("MediaParserGst: couldn't create the 
demuxer"));
+        }
+ + gboolean success = gst_bin_add(GST_BIN(parser->_bin), demuxer);
+        if (!success) {
+            log_error(_("MediaParserGst: failed adding demuxer to bin."));
+            // This error might not be fatal, so we'll continue.
+ } + + success = gst_element_link(typefind, demuxer);
+        if (!success) {
+            throw MediaException(_("MediaParserGst: failed adding demuxer to 
bin."));
+        }
+ + g_signal_connect (demuxer, "pad-added", G_CALLBACK (MediaParserGst::cb_pad_added), parser);
+        g_signal_connect (demuxer, "no-more-pads", G_CALLBACK 
(MediaParserGst::cb_no_more_pads), parser);
+ + if (!gst_element_set_state (parser->_bin, GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS) {
+            throw GnashException(_("MediaParserGst could not change element 
state"));
+        }
+    }
+
+}
+
+//static
+void MediaParserGst::cb_pad_added(GstElement* /* element */, GstPad* new_pad,
+                                  gpointer data)
+{
+    MediaParserGst* parser = static_cast<MediaParserGst*>(data);
+
+ GstCaps* caps = gst_pad_get_caps(new_pad); + print_caps(caps);
+
+    GstElementFactory* parserfactory = swfdec_gst_get_parser_factory (caps);
+
+    if (!parserfactory) {
+        log_error(_("MediaParserGst: Failed to find a parser."));
+        parser->link_to_fakesink(new_pad);
+        return;
+    }
+ + GstElement* parserel = gst_element_factory_create (parserfactory, NULL);
+    gst_object_unref (parserfactory);
+    if (!parserel) {
+        log_error(_("MediaParserGst: Failed to find a parser. We'll continue, "
+                    "but either audio or video will not work!"));
+        parser->link_to_fakesink(new_pad);
+        return;
+    }
+ + gboolean success = gst_bin_add(GST_BIN(parser->_bin), parserel);
+    if (!success) {
+        gst_object_unref(parserel);
+        log_error(_("MediaParserGst: couldn't add parser."));
+        return;
+ } + + GstPad* sinkpad = gst_element_get_static_pad (parserel, "sink");
+    assert(sinkpad);
+ + GstPadLinkReturn ret = gst_pad_link(new_pad, sinkpad); + + gst_object_unref(GST_OBJECT(sinkpad));
+
+    if (!GST_PAD_LINK_SUCCESSFUL(ret)) {
+        log_error(_("MediaParserGst: couldn't link parser."));
+        return;
+    }
+ + GstStructure* str = gst_caps_get_structure (caps, 0);
+    if (!str) {
+        log_error(_("MediaParserGst: couldn't get structure name."));
+ return; + } + + const gchar* structure_name = gst_structure_get_name (str);
+
+    if (g_strrstr (structure_name, "audio")) {
+ + parser->_audiosink = swfdec_gst_connect_sinkpad (parserel, caps);
+        if (!parser->_audiosink) {
+            log_error(_("MediaParserGst: couldn't link \"fake\" sink."));
+ return; + } + + gst_pad_set_chain_function (parser->_audiosink, MediaParserGst::cb_chain_func_audio); + + g_object_set_data (G_OBJECT (parser->_audiosink), "mediaparser-obj", parser); + + AudioInfo* audioinfo = new AudioInfo(0, 0, 0, false, 0, FFMPEG);
+        audioinfo->extra.reset(new ExtraInfoGst(caps));
+
+        parser->_audioInfo.reset(audioinfo);
+
+    } else if (g_strrstr (structure_name, "video")) {
+ + parser->_videosink = swfdec_gst_connect_sinkpad (parserel, caps);
+        if (!parser->_videosink) {
+            log_error(_("MediaParserGst: couldn't link \"fake\" sink."));
+ return; + } + + gst_pad_set_chain_function (parser->_videosink, MediaParserGst::cb_chain_func_video); + + g_object_set_data (G_OBJECT (parser->_videosink), "mediaparser-obj", parser);
+
+        VideoInfo* videoinfo = new VideoInfo(0, 0, 0, false, 0, FFMPEG);
+        videoinfo->extra.reset(new ExtraInfoGst(caps));
+
+        parser->_videoInfo.reset(videoinfo);
+
+    } else {
+        log_error(_("AudioDecoderGst can't handle streams of type %s."),
+                  structure_name);
+        parser->link_to_fakesink(new_pad);
+ } + + if (!gst_element_set_state (parser->_bin, GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS) {
+        throw GnashException(_("MediaParserGst could not change element 
state"));
+    }
+}
+
+// static
+void
+MediaParserGst::cb_no_more_pads (GstElement* /* demuxer */, gpointer data)
+{
+    MediaParserGst* parser = static_cast<MediaParserGst*>(data);
+
+    parser->_demux_probe_ended = true;
+}
+
+
+
+// static
+GstFlowReturn
+MediaParserGst::cb_chain_func_video (GstPad *pad, GstBuffer *buffer)
+{
+    MediaParserGst* parser = (MediaParserGst*) g_object_get_data (G_OBJECT (pad), 
"mediaparser-obj");
+    assert(parser);
+
+    unsigned int frame_num = 0;
+    unsigned int timestamp = 0;
+
+    if (GST_BUFFER_TIMESTAMP_IS_VALID(buffer)) {
+        timestamp = GST_TIME_AS_MSECONDS(GST_BUFFER_TIMESTAMP(buffer));
+    }
+ + if (GST_BUFFER_OFFSET_IS_VALID(buffer)) {
+        frame_num = GST_BUFFER_OFFSET(buffer);
+    }
+
+    EncodedVideoFrame* frame = new EncodedVideoFrame(NULL, 
GST_BUFFER_SIZE(buffer), frame_num, timestamp);
+
+    frame->extradata.reset(new EncodedExtraGstData(buffer));
+ +#ifdef GNASH_DEBUG_DATAFLOW
+    log_debug("remembering video buffer with timestamp %d and frame number 
%d", timestamp, frame_num);
+#endif
+
+    parser->rememberVideoFrame(frame);
+
+    return GST_FLOW_OK;
+}
+
+
+// static
+GstFlowReturn
+MediaParserGst::cb_chain_func_audio (GstPad *pad, GstBuffer *buffer)
+{
+    MediaParserGst* parser = (MediaParserGst*) g_object_get_data (G_OBJECT (pad), 
"mediaparser-obj");
+    assert(parser);
+
+    EncodedAudioFrame* frame = new EncodedAudioFrame;
+    frame->dataSize = GST_BUFFER_SIZE(buffer);
+
+    if (GST_BUFFER_TIMESTAMP_IS_VALID(buffer)) {
+        frame->timestamp = GST_TIME_AS_MSECONDS(GST_BUFFER_TIMESTAMP(buffer));
+    } else {
+        frame->timestamp = 0;
+    }
+ + frame->extradata.reset(new EncodedExtraGstData(buffer)); + +#ifdef GNASH_DEBUG_DATAFLOW
+    log_debug("remembering video buffer with timestamp %d.", frame->timestamp);
+#endif
+
+    parser->rememberAudioFrame(frame);
+
+
+    return GST_FLOW_OK;
+}
+
+} // namespace media
+} // namespace gnash

=== added file 'libmedia/gst/MediaParserGst.h'
--- libmedia/gst/MediaParserGst.h       1970-01-01 00:00:00 +0000
+++ libmedia/gst/MediaParserGst.h       2008-10-05 00:08:38 +0000
@@ -0,0 +1,162 @@
+// MediaParserGst.h: gstreamer media parsers, for Gnash
+//
+//   Copyright (C) 2007, 2008 Free Software Foundation, Inc.
+//
+// This program is free software; you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation; either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+
+
+#ifndef __MEDIAPARSER_GST_H__
+#define __MEDIAPARSER_GST_H__
+
+#ifdef HAVE_CONFIG_H
+#include "gnashconfig.h"
+#endif
+
+#include "MediaParser.h" // for inheritance
+
+#include <vector>
+#include <boost/scoped_array.hpp>
+#include <memory>
+#include <queue>
+
+#include <gst/gst.h>
+#include "ClockTime.h"
+
+
+// Forward declaration
+namespace gnash {
+       class IOChannel;
+}
+
+namespace gnash {
+namespace media {
+
+/// Class to hold extra info found in any stream by the parser.
+struct ExtraInfoGst : public AudioInfo::ExtraInfo, VideoInfo::ExtraInfo, 
boost::noncopyable
+{
+    ExtraInfoGst(GstCaps* gstcaps)
+    :
+        caps(gstcaps)
+    {
+        gst_caps_ref(caps);
+    }
+
+    ~ExtraInfoGst()
+    {
+        gst_caps_unref(caps);
+    }
+
+    GstCaps* caps;
+};
+
+/// Class to hold GstBuffer. Takes ownership.
+struct EncodedExtraGstData : public EncodedExtraData, boost::noncopyable
+{
+    EncodedExtraGstData(GstBuffer* buf)
+    : buffer(buf)
+    {
+        gst_buffer_ref(buffer);
+    }
+    ~EncodedExtraGstData()
+    {
+        gst_buffer_unref(buffer);
+    }
+
+    GstBuffer* buffer;
+};
+
+
+class SimpleTimer : public boost::noncopyable
+{
+public:
+    SimpleTimer()
+        : _start_time(clocktime::getTicks())
+    {
+    }
+ + bool expired() const
+    {
+        return (clocktime::getTicks() - _start_time) > 1000;
+    }
+
+private:
+    boost::uint64_t _start_time;
+};
+
+
+
+/// Gstreamer based MediaParser
+///
+class MediaParserGst: public MediaParser
+{
+public:
+
+    /// Construct a Gstreamer-based media parser for given stream
+    //
+    /// Can throw a MediaException if input format couldn't be detected
+    ///
+    MediaParserGst(std::auto_ptr<IOChannel> stream);
+
+    ~MediaParserGst();
+
+    // See dox in MediaParser.h
+    bool seek(boost::uint32_t&);
+
+    // See dox in MediaParser.h
+    bool parseNextChunk();
+
+    // See dox in MediaParser.h
+    boost::uint64_t getBytesLoaded() const;
+
+    void rememberAudioFrame(EncodedAudioFrame* frame);
+    void rememberVideoFrame(EncodedVideoFrame* frame);
+
+private:
+    bool foundAllStreams();
+
+ bool probingConditionsMet(const SimpleTimer& timer); + + void link_to_fakesink(GstPad* pad);
+
+    static void cb_typefound (GstElement *typefind, guint probability,
+                              GstCaps *caps, gpointer data);
+ + static void cb_pad_added(GstElement* element,
+        GstPad* new_pad, gpointer user_data);
+    static void cb_no_more_pads (GstElement* element, gpointer data);
+
+    static GstFlowReturn cb_chain_func_audio (GstPad *pad, GstBuffer *buffer);
+    static GstFlowReturn cb_chain_func_video (GstPad *pad, GstBuffer *buffer);
+
+    bool pushGstBuffer();
+    void emitEncodedFrames();
+
+
+    GstElement* _bin;
+    GstPad* _srcpad;
+    GstPad* _audiosink;
+    GstPad* _videosink;
+ + bool _demux_probe_ended;
+
+    std::deque<EncodedAudioFrame*> _enc_audio_frames;
+    std::deque<EncodedVideoFrame*> _enc_video_frames;
+};
+
+
+} // gnash.media namespace
+} // namespace gnash
+
+#endif // __MEDIAPARSER_GST_H__

=== modified file 'libmedia/gst/VideoDecoderGst.cpp'
--- libmedia/gst/VideoDecoderGst.cpp    2008-10-01 15:04:46 +0000
+++ libmedia/gst/VideoDecoderGst.cpp    2008-10-05 00:08:38 +0000
@@ -22,8 +22,7 @@
 #endif

 #include "VideoDecoderGst.h"
-#include "gstappsink.h"
-#include "gstappsrc.h"
+#include "MediaParserGst.h"


 namespace gnash {
@@ -33,22 +32,19 @@

 // TODO: implement proper seeking.

+VideoDecoderGst::VideoDecoderGst(GstCaps* caps)
+{
+    setup(caps);
+}
+
+
 VideoDecoderGst::VideoDecoderGst(videoCodecType codec_type, int width, int 
height)
- : _appsink(NULL),
-   _colorspace(NULL)
 {
   gst_init (NULL, NULL);
- - _pipeline = gst_pipeline_new (NULL);

-  _appsrc = gst_element_factory_make ("appsrc", NULL);
- - GstElement* decoder = NULL;
-
   GstCaps* caps;
   switch (codec_type) {
     case VIDEO_CODEC_H263:
-      decoder = gst_element_factory_make ("ffdec_flv", NULL);
       caps = gst_caps_new_simple ("video/x-flash-video",
                                       "width", G_TYPE_INT, width,
"height", G_TYPE_INT, height, @@ -56,7 +52,6 @@
       break;
     case VIDEO_CODEC_VP6:
     case VIDEO_CODEC_VP6A:
-      decoder = gst_element_factory_make ("ffdec_vp6f", NULL);
       caps = gst_caps_new_simple ("video/x-vp6-flash",
                                       "width", G_TYPE_INT, width,
"height", G_TYPE_INT, height, @@ -64,189 +59,119 @@
       break;
     case VIDEO_CODEC_SCREENVIDEO:
     case VIDEO_CODEC_SCREENVIDEO2:
-      decoder = gst_element_factory_make ("ffdec_flashsv", NULL);
       caps = gst_caps_new_simple ("video/x-flash-screen",
                                       "width", G_TYPE_INT, width,
                                       "height", G_TYPE_INT, height,
                                       NULL);
       break;
     case 0:
-      log_debug("Video codec is zero.  Streaming video expected later.");
-      gst_object_unref (GST_OBJECT (_pipeline));
-      _pipeline = NULL;
+      throw MediaException(_("Video codec is zero.  Streaming video expected 
later."));
       break;
     default:
-      log_error("No support for video codec %d (%s).", (int)codec_type, 
codec_type);
-      gst_object_unref (GST_OBJECT (_pipeline));
-      _pipeline = NULL;
+      boost::format msg = boost::format(_("No support for video codec %d.")) % 
(int)codec_type;
+      throw MediaException(msg.str());
+
       return;
   }

-  if (!decoder) {
-    log_error(_("Failed to initialize the video decoder for codec %d (%s). "
-                "Consider installing gstreamer-ffmpeg."),
-               (int)codec_type, codec_type);
-    gst_object_unref (GST_OBJECT (_pipeline));
-    _pipeline = NULL;
-    return;
-  }
- - gst_app_src_set_caps (GST_APP_SRC(_appsrc), caps);
-  gst_caps_unref(caps);
-
-  _colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
-
-  _appsink = gst_element_factory_make ("appsink", NULL);
-
-
-  caps = gst_caps_new_simple ("video/x-raw-rgb", "bpp", G_TYPE_INT, 24,
-                                                 "depth", G_TYPE_INT, 24, 
NULL);
-
-  gst_app_sink_set_caps(GST_APP_SINK(_appsink), caps);
-
-  gst_caps_unref (caps);
-
-
-  gst_bin_add_many (GST_BIN (_pipeline), _appsrc, decoder, _colorspace, 
_appsink, NULL);
-
-  gst_element_link_many(_appsrc, decoder, _colorspace, _appsink, NULL);
- - gst_base_src_set_live(GST_BASE_SRC(_appsrc), TRUE);
-
-  gst_element_set_state (GST_ELEMENT (_pipeline), GST_STATE_PLAYING);
+  setup(caps);
+
 }

+
 VideoDecoderGst::~VideoDecoderGst()
 {
-  if (_pipeline) {
-    gst_element_set_state (GST_ELEMENT (_pipeline), GST_STATE_NULL);
-    gst_object_unref (GST_OBJECT (_pipeline));
-  }
+    swfdec_gst_decoder_push_eos(&_decoder);
+    swfdec_gst_decoder_finish(&_decoder);
+}
+
+void
+VideoDecoderGst::setup(GstCaps* srccaps)
+{
+    gst_init (NULL, NULL);
+
+    GstCaps* sinkcaps;
+ +
+    if (!srccaps) {
+ throw MediaException(_("VideoDecoderGst: internal error (caps creation failed)")); + }
+
+    sinkcaps = gst_caps_new_simple ("video/x-raw-rgb", "bpp", G_TYPE_INT, 24,
+                                                       "depth", G_TYPE_INT, 24,
+                                                       NULL);
+    if (!sinkcaps) {
+ throw MediaException(_("VideoDecoderGst: internal error (caps creation failed)")); + }
+
+    bool rv = swfdec_gst_decoder_init (&_decoder, srccaps, sinkcaps, 
"ffmpegcolorspace", NULL);
+    if (!rv) {
+ throw MediaException(_("VideoDecoderGst: initialisation failed.")); + }
+
+    gst_caps_unref (srccaps);
+    gst_caps_unref (sinkcaps);
 }

 void
 VideoDecoderGst::push(const EncodedVideoFrame& frame)
 {
-  if (!_pipeline) {
-    return;
-  }
- - GstBuffer* buffer = gst_buffer_new(); - - // dunno why gst needs non-const here
-  GST_BUFFER_DATA(buffer) = const_cast<uint8_t*>(frame.data());
- GST_BUFFER_SIZE(buffer) = frame.dataSize(); - GST_BUFFER_OFFSET(buffer) = frame.frameNum();
-  GST_BUFFER_TIMESTAMP(buffer) = GST_CLOCK_TIME_NONE;
-  GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
- - gst_app_src_push_buffer (GST_APP_SRC(_appsrc), buffer); - - checkMessages();
+    GstBuffer* buffer;
+ + EncodedExtraGstData* extradata = dynamic_cast<EncodedExtraGstData*>(frame.extradata.get()); + + if (extradata) {
+        buffer = extradata->buffer;
+    } else {
+        buffer = gst_buffer_new();
+
+        GST_BUFFER_DATA(buffer) = const_cast<uint8_t*>(frame.data());
+ GST_BUFFER_SIZE(buffer) = frame.dataSize(); + GST_BUFFER_OFFSET(buffer) = frame.frameNum();
+        GST_BUFFER_TIMESTAMP(buffer) = GST_CLOCK_TIME_NONE;
+        GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
+    }
+ + bool success = swfdec_gst_decoder_push(&_decoder, buffer);
+    if (!success) {
+        log_error(_("VideoDecoderGst: buffer push failed."));
+        gst_buffer_unref(buffer);
+    }
 }


 std::auto_ptr<image::ImageBase>
 VideoDecoderGst::pop()
 {
-  if (!_pipeline) {
-    return std::auto_ptr<image::ImageBase>();
-  }
-
-  checkMessages();
- - GstBuffer* buffer = gst_app_sink_pull_buffer_timed (GST_APP_SINK(_appsink)); - - if (!buffer) {
-    return std::auto_ptr<image::ImageBase>();
-  }
- - GstCaps* caps = gst_buffer_get_caps(buffer);
-
-  assert(gst_caps_get_size(caps) == 1);
- - GstStructure* structure = gst_caps_get_structure (caps, 0);
-
-  gint height, width;
-
-  gst_structure_get_int (structure, "width", &width);
-  gst_structure_get_int (structure, "height", &height);
- - gst_caps_unref(caps); - - std::auto_ptr<image::ImageBase> ret(new gnashGstBuffer(buffer, width, height)); - - return ret;
+    GstBuffer * buffer = swfdec_gst_decoder_pull (&_decoder);
+
+    if (!buffer) {
+        return std::auto_ptr<image::ImageBase>();
+    }
+ + GstCaps* caps = gst_buffer_get_caps(buffer);
+
+    assert(gst_caps_get_size(caps) == 1);
+ + GstStructure* structure = gst_caps_get_structure (caps, 0);
+
+    gint height, width;
+
+    gst_structure_get_int (structure, "width", &width);
+    gst_structure_get_int (structure, "height", &height);
+ + gst_caps_unref(caps); + + std::auto_ptr<image::ImageBase> ret(new gnashGstBuffer(buffer, width, height)); + + return ret;
 }


 bool
 VideoDecoderGst::peek()
 {
-  if (!_pipeline) {
-    return false;
-  }
-
-  return gst_app_sink_peek_buffer (GST_APP_SINK(_appsink));
-}
-
-void
-VideoDecoderGst::checkMessages() // any messages for me?
-{
-  if (!_pipeline) {
-    return;
-  }
-
-  GstBus* bus = gst_element_get_bus(_pipeline);
-
-  while (gst_bus_have_pending(bus)) {
-    GstMessage* msg = gst_bus_pop(bus);
-    handleMessage(msg);
-
-    gst_message_unref(msg);
-  }
-
-  gst_object_unref(GST_OBJECT(bus));
-}
-
-void
-VideoDecoderGst::handleMessage (GstMessage *message)
-{
-#if 0
-  g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
-#endif
-
-  switch (GST_MESSAGE_TYPE (message)) {
-    case GST_MESSAGE_ERROR:
-    {
-      GError *err;
-      gchar *debug;
-      gst_message_parse_error (message, &err, &debug);
- - log_error(_("VideoDecoderGst::handleMessage: module %s reported: %s"),
-                gst_element_get_name(GST_MESSAGE_SRC (message)), err->message);
- - g_error_free (err);
-      g_free (debug);
- - // Clear any buffers.
-      gst_element_set_state (_pipeline, GST_STATE_NULL);
-
-      break;
-    }
-    case GST_MESSAGE_EOS:
-      log_debug(_("NetStream has reached the end of the stream."));
-
-      break;
- - default:
-    {
-#if 0
-      g_print("unhandled message\n");
-#endif
-    }
-  }
-
+  return !g_queue_is_empty (_decoder.queue);
 }



=== modified file 'libmedia/gst/VideoDecoderGst.h'
--- libmedia/gst/VideoDecoderGst.h      2008-08-18 23:53:04 +0000
+++ libmedia/gst/VideoDecoderGst.h      2008-10-05 00:08:38 +0000
@@ -34,6 +34,9 @@
 #include <gst/gst.h>


+#include "swfdec_codec_gst.h"
+
+
 namespace gnash {
 namespace media {

@@ -69,26 +72,23 @@
 class DSOEXPORT VideoDecoderGst : public VideoDecoder
 {
 public:
-  VideoDecoderGst(videoCodecType codec_type, int width, int height);
-  ~VideoDecoderGst();
-
-  void push(const EncodedVideoFrame& buffer);
-
-  std::auto_ptr<image::ImageBase> pop();
+    VideoDecoderGst(videoCodecType codec_type, int width, int height);
+    VideoDecoderGst(GstCaps* caps);
+    ~VideoDecoderGst();
+
+    void push(const EncodedVideoFrame& buffer);
+
+    std::auto_ptr<image::ImageBase> pop();

-  bool peek();
-
-private: - void checkMessages();
-  void handleMessage(GstMessage* message);
-
-  VideoDecoderGst();
-  VideoDecoderGst(const gnash::media::VideoDecoderGst&);
-
-  GstElement* _pipeline;
-  GstElement* _appsrc;
-  GstElement* _appsink;
-  GstElement* _colorspace;
+    bool peek();
+
+private:
+    void setup(GstCaps* caps);
+
+    VideoDecoderGst();
+    VideoDecoderGst(const gnash::media::VideoDecoderGst&);
+
+    SwfdecGstDecoder _decoder;
 };



=== modified file 'libmedia/gst/swfdec_codec_gst.c'
--- libmedia/gst/swfdec_codec_gst.c     2008-09-29 20:12:08 +0000
+++ libmedia/gst/swfdec_codec_gst.c     2008-10-05 00:08:38 +0000
@@ -50,7 +50,7 @@

 /* NB: try to mirror decodebin behavior */
 static gboolean
-swfdec_gst_feature_filter (GstPluginFeature *feature, gpointer caps)
+swfdec_gst_feature_filter (GstPluginFeature *feature, gpointer caps, const 
gchar* klassname, gboolean autoplugonly)
 {
   const GList *walk;
   const gchar *klass;
@@ -59,13 +59,15 @@
   if (!GST_IS_ELEMENT_FACTORY (feature))
     return FALSE;

+
   /* only decoders are interesting */
   klass = gst_element_factory_get_klass (GST_ELEMENT_FACTORY (feature));
-  if (strstr (klass, "Decoder") == NULL)
+  if (strstr (klass, klassname) == NULL)
     return FALSE;

+
   /* only select elements with autoplugging rank */
-  if (gst_plugin_feature_get_rank (feature) < GST_RANK_MARGINAL)
+  if (autoplugonly && gst_plugin_feature_get_rank (feature) < 
GST_RANK_MARGINAL)
     return FALSE;

   /* only care about the right sink caps */
@@ -92,6 +94,26 @@
   return FALSE;
 }

+static gboolean
+swfdec_gst_feature_filter_decoder (GstPluginFeature *feature, gpointer caps)
+{
+    return swfdec_gst_feature_filter (feature, caps, "Decoder", TRUE);
+}
+
+static gboolean
+swfdec_gst_feature_filter_demuxer (GstPluginFeature *feature, gpointer caps)
+{
+    return swfdec_gst_feature_filter (feature, caps, "Demuxer", TRUE);
+}
+
+static gboolean
+swfdec_gst_feature_filter_parser (GstPluginFeature *feature, gpointer caps)
+{
+    return swfdec_gst_feature_filter (feature, caps, "Parser", FALSE);
+}
+
+
+
 static int
 swfdec_gst_compare_features (gconstpointer a_, gconstpointer b_)
 {
@@ -106,14 +128,14 @@
   return strcmp (gst_plugin_feature_get_name (a), gst_plugin_feature_get_name 
(b));
 }

-GstElementFactory *
-swfdec_gst_get_element_factory (GstCaps *caps)
+static GstElementFactory *
+_swfdec_gst_get_factory (GstCaps *caps, GstPluginFeatureFilter filter)
 {
   GstElementFactory *ret;
   GList *list;

list = gst_registry_feature_filter (gst_registry_get_default (), - swfdec_gst_feature_filter, FALSE, caps);
+      filter, FALSE, caps);
   if (list == NULL)
     return NULL;

@@ -124,9 +146,28 @@
   return ret;
 }

+GstElementFactory *
+swfdec_gst_get_element_factory (GstCaps *caps)
+{
+  return _swfdec_gst_get_factory (caps, swfdec_gst_feature_filter_decoder);
+}
+
+GstElementFactory *
+swfdec_gst_get_demuxer_factory (GstCaps *caps)
+{
+  return  _swfdec_gst_get_factory (caps, swfdec_gst_feature_filter_demuxer);
+}
+
+GstElementFactory *
+swfdec_gst_get_parser_factory (GstCaps *caps)
+{
+  return  _swfdec_gst_get_factory (caps, swfdec_gst_feature_filter_parser);
+}
+
+
 /*** PADS ***/

-static GstPad *
+/* static */ GstPad *
 swfdec_gst_connect_srcpad (GstElement *element, GstCaps *caps)
 {
   GstPadTemplate *tmpl;
@@ -153,7 +194,7 @@
   return NULL;
 }

-static GstPad *
+/*static*/ GstPad *
 swfdec_gst_connect_sinkpad (GstElement *element, GstCaps *caps)
 {
   GstPadTemplate *tmpl;

=== modified file 'libmedia/gst/swfdec_codec_gst.h'
--- libmedia/gst/swfdec_codec_gst.h     2008-09-29 16:02:53 +0000
+++ libmedia/gst/swfdec_codec_gst.h     2008-10-05 00:08:38 +0000
@@ -46,6 +46,14 @@
 GstElementFactory *
                swfdec_gst_get_element_factory  (GstCaps *              caps);

+GstElementFactory *
+               swfdec_gst_get_demuxer_factory  (GstCaps *              caps);
+GstElementFactory *
+               swfdec_gst_get_parser_factory   (GstCaps *              caps);
+
+GstPad *       swfdec_gst_connect_srcpad (GstElement *element, GstCaps *caps);
+
+GstPad *       swfdec_gst_connect_sinkpad (GstElement *element, GstCaps *caps);

 G_END_DECLS
 #endif






reply via email to

[Prev in Thread] Current Thread [Next in Thread]