Advanced search

Media (91)

Other articles (48)

  • Keeping control of your media in your hands

    13 April 2011, by

    The vocabulary used on this site, and around MediaSPIP in general, aims to avoid references to Web 2.0 and the companies that profit from media sharing.
    While using MediaSPIP, you are invited to avoid using words like "Brand", "Cloud" and "Market".
    MediaSPIP is designed to facilitate the sharing of creative media online, while allowing authors to retain complete control of their work.
    MediaSPIP aims to be accessible to as many people as possible and development is based on expanding the (...)

  • Use, discuss, criticize

    13 April 2011, by

    Talk to people directly involved in MediaSPIP’s development, or to people around you who could use MediaSPIP to share, enhance or develop their creative projects.
    The bigger the community, the more MediaSPIP’s potential will be explored and the faster the software will evolve.
    A discussion list is available for all exchanges between users.

  • Publishing on MédiaSpip

    13 June 2013

    Can I post content from an iPad tablet?
    Yes, if your MédiaSpip installation is version 0.2 or higher. If necessary, contact your MédiaSpip administrator to find out.

On other sites (7958)

  • JavaCPP FFMpeg to JavaSound

    8 August 2020, by TW2

    I am having trouble playing audio with the JavaCPP FFmpeg library. I don’t know how to pass the decoded audio to Java Sound, and I am not sure my code is correct either.

    


    Here is the most important part of my code (video works fine, so I leave it out):

    


    The variables:

    


    //==========================================================================
    // FFMpeg 4.x - Video and Audio
    //==========================================================================

    private final AVFormatContext   pFormatCtx = new AVFormatContext(null);
    private final AVDictionary      OPTIONS_DICT = null;
    private AVPacket                pPacket = new AVPacket();

    //==========================================================================
    // FFMpeg 4.x - Audio
    //==========================================================================

    private AVCodec                 pAudioCodec;
    private AVCodecContext          pAudioCodecCtx;
    private final List<StreamInfo>  audioStreams = new ArrayList<>();
    private int                     audio_data_size;
    private final BytePointer       audio_data = new BytePointer(0);
    private int                     audio_ret;
    private AVFrame                 pAudioDecodedFrame = null;
    private AVCodecParserContext    pAudioParser;
    private SwrContext              audio_swr_ctx = null;

    Then I call the prepare functions in this order:


    private void prepareFirst() throws Exception{
        oldFile = file;

        // Initialize packet and check for error
        pPacket = av_packet_alloc();
        if(pPacket == null){
            throw new Exception("ALL: Couldn't allocate packet");
        }

        // Open video file
        if (avformat_open_input(pFormatCtx, file.getPath(), null, null) != 0) {
            throw new Exception("ALL: Couldn't open file");
        }

        // Retrieve stream information
        if (avformat_find_stream_info(pFormatCtx, (PointerPointer)null) < 0) {
            throw new Exception("ALL: Couldn't find stream information");
        }

        // Dump information about file onto standard error
        av_dump_format(pFormatCtx, 0, file.getPath(), 0);

        // Find the first audio/video stream
        for (int i = 0; i < pFormatCtx.nb_streams(); i++) {
            switch(pFormatCtx.streams(i).codecpar().codec_type()){
                case AVMEDIA_TYPE_VIDEO -> videoStreams.add(new StreamInfo(i, pFormatCtx.streams(i)));
                case AVMEDIA_TYPE_AUDIO -> audioStreams.add(new StreamInfo(i, pFormatCtx.streams(i)));
            }
        }

        if(videoStreams.isEmpty() && type != PlayType.AudioOnly){
            throw new Exception("Didn't find a video stream");
        }
        if(audioStreams.isEmpty() && type != PlayType.VideoOnly){
            throw new Exception("Didn't find an audio stream");
        }
    }

    private void prepareAudio() throws Exception{
        //==================================================================
        // AUDIO
        //------------------------------------------------------------------

        if(!audioStreams.isEmpty()){

    //        // Let's search for AVCodec
    //        pAudioCodec = avcodec_find_decoder(pFormatCtx.streams(audioStreams.get(0).getStreamIndex()).codecpar().codec_id());
    //        if (pAudioCodec == null) {
    //            throw new Exception("AUDIO: Unsupported codec or not found!");
    //        }
    //
    //        // Let's alloc AVCodecContext
    //        pAudioCodecCtx = avcodec_alloc_context3(pAudioCodec);
    //        if (pAudioCodecCtx == null) {
    //            throw new Exception("AUDIO: Unallocated codec context or not found!");
    //        }

            // Get a pointer to the codec context for the audio stream
            pAudioCodecCtx = pFormatCtx.streams(audioStreams.get(0).getStreamIndex()).codec();

            // Find the decoder for the audio stream
            pAudioCodec = avcodec_find_decoder(pAudioCodecCtx.codec_id());
            if (pAudioCodec == null) {
                throw new Exception("AUDIO: Unsupported codec or not found!");
            }

            /* open it */
            if (avcodec_open2(pAudioCodecCtx, pAudioCodec, OPTIONS_DICT) < 0) {
                throw new Exception("AUDIO: Could not open codec");
            }

            pAudioDecodedFrame = av_frame_alloc();
            if (pAudioDecodedFrame == null){
                throw new Exception("AUDIO: DecodedFrame allocation failed");
            }

            audio_swr_ctx = swr_alloc_set_opts(
                    null,                           // existing Swr context or NULL
                    AV_CH_LAYOUT_STEREO,            // output channel layout (AV_CH_LAYOUT_*)
                    AV_SAMPLE_FMT_S16,              // output sample format (AV_SAMPLE_FMT_*)
                    44100,                          // output sample rate (frequency in Hz)
                    pAudioCodecCtx.channels(),      // input channel layout (AV_CH_LAYOUT_*)
                    pAudioCodecCtx.sample_fmt(),    // input sample format (AV_SAMPLE_FMT_*)
                    pAudioCodecCtx.sample_rate(),   // input sample rate (frequency in Hz)
                    0,                              // logging level offset
                    null                            // parent logging context, can be NULL
            );

            swr_init(audio_swr_ctx);

            av_samples_fill_arrays(
                    pAudioDecodedFrame.data(),      // audio_data
                    pAudioDecodedFrame.linesize(),  // linesize
                    audio_data,                     // buf
                    (int)AV_CH_LAYOUT_STEREO,       // nb_channels
                    44100,                          // nb_samples
                    AV_SAMPLE_FMT_S16,              // sample_fmt
                    0                               // align
            );

        }

        // Audio treatment end ---------------------------------------------
        //==================================================================
    }

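    Two details in prepareAudio() above are worth flagging. In FFmpeg 4.x the AVStream.codec accessor used to obtain pAudioCodecCtx is deprecated (the commented-out avcodec_alloc_context3() path is the current one, completed by avcodec_parameters_to_context()), and swr_alloc_set_opts() expects a channel layout where this code passes pAudioCodecCtx.channels(), a channel count. A minimal sketch of that setup, assuming the same JavaCPP presets and field names as above:

    // Sketch only: FFmpeg 4.x-style decoder setup for the first audio stream.
    int audioIdx = audioStreams.get(0).getStreamIndex();

    // Allocate the codec context from the stream parameters instead of
    // going through the deprecated AVStream.codec accessor.
    pAudioCodec = avcodec_find_decoder(pFormatCtx.streams(audioIdx).codecpar().codec_id());
    pAudioCodecCtx = avcodec_alloc_context3(pAudioCodec);
    avcodec_parameters_to_context(pAudioCodecCtx, pFormatCtx.streams(audioIdx).codecpar());

    // swr_alloc_set_opts() takes channel layouts (AV_CH_LAYOUT_*), not counts,
    // on both the output and the input side.
    audio_swr_ctx = swr_alloc_set_opts(
            null,
            AV_CH_LAYOUT_STEREO,                                      // out layout
            AV_SAMPLE_FMT_S16,                                        // out format
            44100,                                                    // out rate
            av_get_default_channel_layout(pAudioCodecCtx.channels()), // in layout
            pAudioCodecCtx.sample_fmt(),                              // in format
            pAudioCodecCtx.sample_rate(),                             // in rate
            0, null);
    swr_init(audio_swr_ctx);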

    Then, when I launch the thread:


    private void doPlay() throws Exception{
        av_init_packet(pPacket);

        // Read frames
        while (av_read_frame(pFormatCtx, pPacket) >= 0) {
            if (type != PlayType.AudioOnly && pPacket.stream_index() == videoStreams.get(0).getStreamIndex()) {
                // Is this a packet from the video stream?
                decodeVideo();
                renewPacket();
            }

            if (type != PlayType.VideoOnly && pPacket.stream_index() == audioStreams.get(0).getStreamIndex()) {
                // Is this a packet from the audio stream?
                if(pPacket.size() > 0){
                    decodeAudio();
                    renewPacket();
                }
            }
        }
    }

    private void renewPacket(){
        // Free the packet that was allocated by av_read_frame
        av_packet_unref(pPacket);

        pPacket.data(null);
        pPacket.size(0);
        av_init_packet(pPacket);
    }


    And again, this is where the audio fails to play:


    private void decodeAudio() throws Exception{

        do {
            audio_ret = avcodec_send_packet(pAudioCodecCtx, pPacket);
        } while(audio_ret == AVERROR_EAGAIN());
        System.out.println("packet sent return value: " + audio_ret);

        if(audio_ret == AVERROR_EOF || audio_ret == AVERROR_EINVAL()) {
            StringBuilder sb = new StringBuilder();
            Formatter formatter = new Formatter(sb, Locale.US);
            formatter.format("AVERROR(EAGAIN): %d, AVERROR_EOF: %d, AVERROR(EINVAL): %d\n", AVERROR_EAGAIN(), AVERROR_EOF, AVERROR_EINVAL());
            formatter.format("Audio frame getting error (%d)!\n", audio_ret);
            throw new Exception(sb.toString());
        }

        audio_ret = avcodec_receive_frame(pAudioCodecCtx, pAudioDecodedFrame);
        System.out.println("frame received return value: " + audio_ret);

        audio_data_size = av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);

        if (audio_data_size < 0) {
            /* This should not occur, checking just for paranoia */
            throw new Exception("Failed to calculate data size");
        }

        double frame_nb = 44100d / pAudioCodecCtx.sample_rate() * pAudioDecodedFrame.nb_samples();
        long out_count = Math.round(Math.floor(frame_nb));

        int out_samples = swr_convert(
                audio_swr_ctx,
                audio_data,
                (int)out_count,
                pAudioDecodedFrame.data(0),
                pAudioDecodedFrame.nb_samples()
        );

        if (out_samples < 0) {
            throw new Exception("AUDIO: Error while converting");
        }

        int dst_bufsize = av_samples_get_buffer_size(
            pAudioDecodedFrame.linesize(),
            (int)AV_CH_LAYOUT_STEREO,
            out_samples,
            AV_SAMPLE_FMT_S16,
            1
        );

        AudioFormat audioFormat = new AudioFormat(
                pAudioDecodedFrame.sample_rate(),
                16,
                2,
                true,
                false
        );

        BytePointer bytePointer = pAudioDecodedFrame.data(0);
        ByteBuffer byteBuffer = bytePointer.asBuffer();

        byte[] bytes = new byte[byteBuffer.remaining()];
        byteBuffer.get(bytes);

        try (SourceDataLine sdl = AudioSystem.getSourceDataLine(audioFormat)) {
            sdl.open(audioFormat);
            sdl.start();
            sdl.write(bytes, 0, bytes.length);
            sdl.drain();
            sdl.stop();
        } catch (LineUnavailableException ex) {
            Logger.getLogger(AVEntry.class.getName()).log(Level.SEVERE, null, ex);
        }
    }


    Do you have any ideas?

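    One pattern worth questioning in decodeAudio() above: a new SourceDataLine is opened, drained and stopped for every single frame, and the bytes written to it are taken from pAudioDecodedFrame.data(0) (still in the decoder's native sample format) rather than from the audio_data buffer that swr_convert() filled. A minimal sketch of the Java Sound side, assuming the 16-bit stereo 44100 Hz output configured above, with one line kept open for the whole stream:

    import javax.sound.sampled.AudioFormat;
    import javax.sound.sampled.AudioSystem;
    import javax.sound.sampled.LineUnavailableException;
    import javax.sound.sampled.SourceDataLine;

    // Sketch: a long-lived PCM sink; write each converted chunk as it arrives.
    public final class PcmSink implements AutoCloseable {
        private final SourceDataLine line;

        public PcmSink() throws LineUnavailableException {
            // Must match the swr_convert() output: signed 16-bit little-endian, stereo, 44100 Hz.
            AudioFormat fmt = new AudioFormat(44100f, 16, 2, true, false);
            line = AudioSystem.getSourceDataLine(fmt);
            line.open(fmt);
            line.start();
        }

        // Called once per decoded frame with the resampled bytes.
        public void play(byte[] pcm, int length) {
            line.write(pcm, 0, length); // blocks until buffered; no drain() per frame
        }

        @Override
        public void close() {
            line.drain(); // flush the tail, then release the device
            line.stop();
            line.close();
        }
    }

    The bytes handed to play() would be copied out of audio_data after each swr_convert() call (dst_bufsize of them), not out of the raw decoded frame.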

  • Screeching white sound coming while playing audio as a raw stream

    27 April 2020, by Sri Nithya Sharabheshwarananda

    I. Background


    1. I am trying to make an application which helps to match subtitles to the audio waveform very accurately, at the waveform level, at the word level or even at the character level.
    2. The audio is expected to be Sanskrit chants (Yoga, rituals etc.) which are extremely long compound words [example: aṅganyā-sokta-mātaro-bījam is traditionally one word, broken only to assist reading].
    3. The input transcripts / subtitles might be roughly in sync at the sentence/verse level but surely would not be in sync at the word level.
    4. The application should be able to figure out points of silence in the audio waveform, so that it can guess the start and end points of each word (or even of each letter/consonant/vowel in a word), such that the chanted audio and the visual subtitle match perfectly at the word (or even letter/consonant/vowel) level; the UI then highlights or animates the exact word (or even letter) in the subtitle line being chanted at that moment, and also shows that word (or letter/consonant/vowel) in a bigger font. This app’s purpose is to assist learning Sanskrit chanting.
    5. It is not expected to be a 100% automated process, nor 100% manual, but a mix where the application assists the human as much as possible.

    II. Following is the first code I wrote for this purpose, wherein:


    1. First I open an mp3 (or any audio format) file,
    2. Seek to some arbitrary point in the timeline of the audio file // as of now playing from zero offset
    3. Get the audio data in raw format for 2 purposes - (1) playing it and (2) drawing the waveform.
    4. Play the raw audio data using standard Java audio libraries.

    III. The problem I am facing is that between every cycle there is a screeching sound.


    • Probably I need to close the line between cycles? Sounds simple, I can try.
    • But I am also wondering if this overall approach itself is correct. Any tip, guide, suggestion or link would be really helpful.
    • Also, I just hard-coded the sample rate etc. (44100 Hz etc.); are these good to set as default presets, or should they depend on the input format? (A sketch on this follows the code below.)

    IV. Here is the code


    import com.github.kokorin.jaffree.StreamType;
    import com.github.kokorin.jaffree.ffmpeg.FFmpeg;
    import com.github.kokorin.jaffree.ffmpeg.FFmpegProgress;
    import com.github.kokorin.jaffree.ffmpeg.FFmpegResult;
    import com.github.kokorin.jaffree.ffmpeg.NullOutput;
    import com.github.kokorin.jaffree.ffmpeg.PipeOutput;
    import com.github.kokorin.jaffree.ffmpeg.ProgressListener;
    import com.github.kokorin.jaffree.ffprobe.Stream;
    import com.github.kokorin.jaffree.ffmpeg.UrlInput;
    import com.github.kokorin.jaffree.ffprobe.FFprobe;
    import com.github.kokorin.jaffree.ffprobe.FFprobeResult;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicLong;
    import javax.sound.sampled.AudioFormat;
    import javax.sound.sampled.AudioSystem;
    import javax.sound.sampled.DataLine;
    import javax.sound.sampled.SourceDataLine;


    public class FFMpegToRaw {
        Path BIN = Paths.get("f:\\utilities\\ffmpeg-20190413-0ad0533-win64-static\\bin");
        String VIDEO_MP4 = "f:\\org\\TEMPLE\\DeviMahatmyamRecitationAudio\\03_01_Devi Kavacham.mp3";
        FFprobe ffprobe;
        FFmpeg ffmpeg;

        public void basicCheck() throws Exception {
            if (BIN != null) {
                ffprobe = FFprobe.atPath(BIN);
            } else {
                ffprobe = FFprobe.atPath();
            }
            FFprobeResult result = ffprobe
                    .setShowStreams(true)
                    .setInput(VIDEO_MP4)
                    .execute();

            for (Stream stream : result.getStreams()) {
                System.out.println("Stream " + stream.getIndex()
                        + " type " + stream.getCodecType()
                        + " duration " + stream.getDuration(TimeUnit.SECONDS));
            }
            if (BIN != null) {
                ffmpeg = FFmpeg.atPath(BIN);
            } else {
                ffmpeg = FFmpeg.atPath();
            }

            // Sometimes ffprobe can't show the exact duration; use ffmpeg transcoding to NULL output to get it
            final AtomicLong durationMillis = new AtomicLong();
            FFmpegResult fFmpegResult = ffmpeg
                    .addInput(
                            UrlInput.fromUrl(VIDEO_MP4)
                    )
                    .addOutput(new NullOutput())
                    .setProgressListener(new ProgressListener() {
                        @Override
                        public void onProgress(FFmpegProgress progress) {
                            durationMillis.set(progress.getTimeMillis());
                        }
                    })
                    .execute();
            System.out.println("audio size - " + fFmpegResult.getAudioSize());
            System.out.println("Exact duration: " + durationMillis.get() + " milliseconds");
        }

        public void toRawAndPlay() throws Exception {
            ProgressListener listener = new ProgressListener() {
                @Override
                public void onProgress(FFmpegProgress progress) {
                    System.out.println(progress.getFrame());
                }
            };

            // code derived from : https://stackoverflow.com/questions/32873596/play-raw-pcm-audio-received-in-udp-packets

            int sampleRate = 44100;//24000;//Hz
            int sampleSize = 16;//Bits
            int channels   = 1;
            boolean signed = true;
            boolean bigEnd = false;
            String format  = "s16be"; //"f32le"

            //https://trac.ffmpeg.org/wiki/audio types
            final AudioFormat af = new AudioFormat(sampleRate, sampleSize, channels, signed, bigEnd);
            final DataLine.Info info = new DataLine.Info(SourceDataLine.class, af);
            final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);

            line.open(af, 4096); // format , buffer size
            line.start();

            OutputStream destination = new OutputStream() {
                @Override public void write(int b) throws IOException {
                    throw new UnsupportedOperationException("Nobody uses this.");
                }
                @Override public void write(byte[] b, int off, int len) throws IOException {
                    String o = new String(b);
                    boolean showString = false;
                    System.out.println("New output (" + len
                            + ", off=" + off + ") -> " + (showString ? o : ""));
                    // output wave form repeatedly

                    if (len % 2 != 0) {
                        len -= 1;
                        System.out.println("");
                    }
                    line.write(b, off, len);
                    System.out.println("done round");
                }
            };

            // src : http://blog.wudilabs.org/entry/c3d357ed/?lang=en-US
            FFmpegResult result = FFmpeg.atPath(BIN).
                addInput(UrlInput.fromPath(Paths.get(VIDEO_MP4))).
                addOutput(PipeOutput.pumpTo(destination).
                    disableStream(StreamType.VIDEO). //.addArgument("-vn")
                    setFrameRate(sampleRate).        //.addArguments("-ar", sampleRate)
                    addArguments("-ac", "1").
                    setFormat(format)                //.addArguments("-f", format)
                ).
                setProgressListener(listener).
                execute();

            // shut down audio
            line.drain();
            line.stop();
            line.close();

            System.out.println("result = " + result.toString());
        }

        public static void main(String[] args) throws Exception {
            FFMpegToRaw raw = new FFMpegToRaw();
            raw.basicCheck();
            raw.toRawAndPlay();
        }
    }


    Thank You

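    One mismatch stands out in the code above, offered as a likely cause rather than a confirmed fix: the pipe asks FFmpeg for "s16be" (signed 16-bit big-endian) while the AudioFormat is constructed with bigEnd = false (little-endian). Byte-swapped 16-bit PCM is a classic source of exactly this kind of screech. A small sketch of the two consistent pairings:

    // Keep the piped sample format and the Java Sound format in agreement.
    // Option 1: little-endian on both sides.
    String format = "s16le";
    AudioFormat af = new AudioFormat(44100, 16, 1, true, false); // bigEndian = false

    // Option 2: keep "s16be" and declare big-endian to Java Sound.
    // String format = "s16be";
    // AudioFormat af = new AudioFormat(44100, 16, 1, true, true); // bigEndian = true

    As for the hard-coded 44100 Hz: the FFprobe call in basicCheck() already reports each stream's properties, so the sample rate could be read from there instead of being preset.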

  • Trying to cancel execution and delete file using ffmpeg C API

    6 March 2020, by Vuwox

    The code below is a class that handles the conversion of multiple images, added through the add_frame() method, into a GIF produced by encode(). It also uses a filter to generate and apply the palette. The usage is like this:

    Code call example

    std::unique_ptr<px::GIF::FFMPEG> gif_obj = nullptr;
    try
    {
       gif_obj = std::make_unique<px::GIF::FFMPEG>(px::Point2D<int>{1000, 1000}, 12, "C:/out.gif",
                 "format=pix_fmts=rgb24,split [a][b];[a]palettegen[p];[b][p]paletteuse");

       // Example: a simple vector of images (usually processed internally)
       for(auto img : image_vector)
            gif_obj->add_frame(img);

       // Once all frames were added, encode the final GIF with the filter applied.
       gif_obj->encode();
    }
    catch(const std::exception& e)
    {
       // An error occurred! We must close FFMPEG properly and delete the created file.
       gif_obj->cancel();
    }

    I have the following issue: if the code throws an exception for any reason, I call gif_obj->cancel() and it is supposed to delete the GIF file on disk. But this never works; I assume there is a lock on the file or something like that. So here is my question:

    What is the proper way to close/free the ffmpeg objects in order to remove the file afterward?
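
    A plausible explanation, sketched under the assumption that nothing else holds the file open: init_muxer() opens the output with avio_open(), but neither release() nor cancel() ever closes ofmt_ctx->pb, so the process still owns the file handle when remove() runs, and on Windows an open handle makes deletion fail. Closing the AVIOContext before freeing the format context would look roughly like this:

    // Sketch of a cancel() that closes the output handle before deleting the file.
    // It reuses the members of the class below; avio_closep() closes and nulls
    // the AVIOContext that avio_open() created in init_muxer().
    void px::GIF::FFMPEG::cancel()
    {
       if (ofmt_ctx != nullptr)
       {
           av_write_trailer(ofmt_ctx);  // finalize whatever was already written
           avio_closep(&ofmt_ctx->pb);  // release the OS file handle
       }

       release();                       // then free codecs, filters and frames

       // With the handle closed, the file can actually be removed.
       if (remove(m_filename.c_str()) != 0)
           PX_LOG0(PX_LOGLEVEL_ERROR, "GIF::FFMPEG - On 'cancel' failed to remove the file.");
    }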


    Full class code below

    Header

    // C++ Standard includes    
    #include <memory>
    #include <string>
    #include <vector>


    // 3rd Party includes
    #ifdef __cplusplus
    extern "C" {
    #include "libavformat/avformat.h"
    #include "libavfilter/avfilter.h"
    #include "libavutil/opt.h"
    #include "libavfilter/buffersrc.h"
    #include "libavfilter/buffersink.h"
    #include "libswscale/swscale.h"
    #include "libavutil/imgutils.h"
    }
    #endif

    #define FFMPEG_MSG_LEN 2000

    namespace px
    {
       namespace GIF
       {
           class FFMPEG
           {
           public:
               FFMPEG(const px::Point2D<int>& dim,
                      const int framerate,
                      const std::string& filename,
                      const std::string& filter_cmd);

               ~FFMPEG();

               void add_frame(pxImage * const img);
               void encode();
               void cancel();

           private:

               void init_filters();            // Init everything that needed to filter the input frame.
               void init_muxer();              // The muxer that creates the output file.
               void muxing_one_frame(AVFrame* frame);
               void release();

               int _ret = 0;                   // status code from FFMPEG.
               char _err_msg[FFMPEG_MSG_LEN];  // Error message buffer.


               int m_width = 0;                // The width that all future images must have to be accepted.
               int m_height = 0;               // The height that all future images must have to be accepted.

               int m_framerate = 0;            // GIF Framerate.
               std::string m_filename = "";    // The GIF filename (on cache?)
               std::string m_filter_desc = ""; // The FFMPEG filter to apply over the frames.

               bool as_frame = false;

               AVFrame* picture_rgb24 = nullptr;           // Temporary frame that will hold the pxImage in an RGB24 format (NOTE: TOP-LEFT origin)

               AVFormatContext* ofmt_ctx = nullptr;        // output format context associated to the GIF file
               AVCodecContext* o_codec_ctx = nullptr;      // output codec for the GIF

               AVFilterGraph* filter_graph = nullptr;      // filter graph associate with the string we want to execute
               AVFilterContext* buffersrc_ctx = nullptr;   // The buffer that will store all the frames in one place for the palette generation.
               AVFilterContext* buffersink_ctx = nullptr;  // The buffer that will store the result afterward (once the palette are used).

               int64_t m_pts_increment = 0;
           };
       };
    };

    ctor

    px::GIF::FFMPEG::FFMPEG(const px::Point2D<int>& dim,
                           const int framerate,
                           const std::string& filename,
                           const std::string& filter_cmd) :
       m_width(dim.x()),
       m_height(dim.y()),
       m_framerate(framerate),
       m_filename(filename),
       m_filter_desc(filter_cmd)
    {
    #if !_DEBUG
       av_log_set_level(AV_LOG_QUIET); // Set the FFMPEG log to quiet to avoid too much logs.
    #endif

       // Allocate the temporary buffer that hold the ffmpeg image (pxImage to AVFrame conversion).
       picture_rgb24 = av_frame_alloc();
       picture_rgb24->pts = 0;
       picture_rgb24->data[0] = NULL;
       picture_rgb24->linesize[0] = -1;
       picture_rgb24->format = AV_PIX_FMT_RGB24;
       picture_rgb24->height = m_height;
       picture_rgb24->width = m_width;

       if ((_ret = av_image_alloc(picture_rgb24->data, picture_rgb24->linesize, m_width, m_height, (AVPixelFormat)picture_rgb24->format, 24)) < 0)
           throw px::GIF::Error("Failed to allocate the AVFrame for pxImage conversion with error: " +
                                std::string(av_make_error_string(_err_msg, FFMPEG_MSG_LEN, _ret)),
                                "GIF::FFMPEG CTOR");  

       //printf("allocated picture of size %d, linesize %d %d %d %d\n", _ret, picture_rgb24->linesize[0], picture_rgb24->linesize[1], picture_rgb24->linesize[2], picture_rgb24->linesize[3]);

       init_muxer();   // Prepare the GIF encoder (open it on disk).
       init_filters(); // Prepare the filter that will be applied over the frame.

       // Instead of hardcoding {1,100}, which is the GIF tbn, we collect it from its stream.
       // This will avoid future problems if the codec changes in ffmpeg.
       if (ofmt_ctx && ofmt_ctx->nb_streams > 0)
           m_pts_increment = av_rescale_q(1, { 1, m_framerate }, ofmt_ctx->streams[0]->time_base);
       else
           m_pts_increment = av_rescale_q(1, { 1, m_framerate }, { 1, 100 });
    }

    FFMPEG Initialization (Filter and muxer)

    void px::GIF::FFMPEG::init_filters()
    {
       const AVFilter* buffersrc = avfilter_get_by_name("buffer");
       const AVFilter* buffersink = avfilter_get_by_name("buffersink");

       AVRational time_base = { 1, m_framerate };
       AVRational aspect_pixel = { 1, 1 };

       AVFilterInOut* inputs = avfilter_inout_alloc();
       AVFilterInOut* outputs = avfilter_inout_alloc();

       filter_graph = avfilter_graph_alloc();

       try
       {
           if (!outputs || !inputs || !filter_graph)
               throw px::GIF::Error("Failed to 'init_filters' could not allocated the graph/filters.", "GIF::FFMPEG init_filters");

           char args[512];
           snprintf(args, sizeof(args),
                    "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
                    m_width, m_height,
                    picture_rgb24->format,
                    time_base.num, time_base.den,
                    aspect_pixel.num, aspect_pixel.den);

           if (avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in", args, nullptr, filter_graph) < 0)
               throw px::GIF::Error("Failed to create the 'source buffer' in init_filters method.", "GIF::FFMPEG init_filters");


           if (avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out", nullptr, nullptr, filter_graph) < 0)
               throw px::GIF::Error("Failed to create the 'sink buffer' in init_filters method.", "GIF::FFMPEG init_filters");

           // GIF has possible output of PAL8.
           enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_PAL8, AV_PIX_FMT_NONE };

           if (av_opt_set_int_list(buffersink_ctx, "pix_fmts", pix_fmts, AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN) < 0)
               throw px::GIF::Error("Failed to set the output pixel format.", "GIF::FFMPEG init_filters");

           outputs->name = av_strdup("in");
           outputs->filter_ctx = buffersrc_ctx;
           outputs->pad_idx = 0;
           outputs->next = nullptr;

           inputs->name = av_strdup("out");
           inputs->filter_ctx = buffersink_ctx;
           inputs->pad_idx = 0;
           inputs->next = nullptr;

           // GIF has possible output of PAL8.
           if (avfilter_graph_parse_ptr(filter_graph, m_filter_desc.c_str(), &inputs, &outputs, nullptr) < 0)
               throw px::GIF::Error("Failed to parse the filter graph (bad string!).", "GIF::FFMPEG init_filters");

           if (avfilter_graph_config(filter_graph, nullptr) < 0)
               throw px::GIF::Error("Failed to configure the filter graph (bad string!).", "GIF::FFMPEG init_filters");

           avfilter_inout_free(&inputs);
           avfilter_inout_free(&outputs);
       }
       catch (const std::exception& e)
       {
           // Catch exception to delete elements.
           avfilter_inout_free(&inputs);
           avfilter_inout_free(&outputs);
           throw; // re-throw the original exception, preserving its type
       }
       }
    }


    void px::GIF::FFMPEG::init_muxer()
    {
       AVOutputFormat* o_fmt = av_guess_format("gif", m_filename.c_str(), "video/gif");

       if ((_ret = avformat_alloc_output_context2(&ofmt_ctx, o_fmt, "gif", m_filename.c_str())) < 0)
           throw px::GIF::Error(std::string(av_make_error_string(_err_msg, FFMPEG_MSG_LEN, _ret)) + " allocate output format.", "GIF::FFMPEG init_muxer");

       AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_GIF);
       if (!codec) throw px::GIF::Error("Could not find the 'GIF' codec.", "GIF::FFMPEG init_muxer");

    #if 0
       const AVPixelFormat* p = codec->pix_fmts;
       while (p != NULL && *p != AV_PIX_FMT_NONE) {
           printf("supported pix fmt: %s\n", av_get_pix_fmt_name(*p));
           ++p;
       }
    #endif

       AVStream* stream = avformat_new_stream(ofmt_ctx, codec);

       AVCodecParameters* codec_parameters = stream->codecpar;
       codec_parameters->codec_tag = 0;
       codec_parameters->codec_id = codec->id;
       codec_parameters->codec_type = AVMEDIA_TYPE_VIDEO;
       codec_parameters->width = m_width;
       codec_parameters->height = m_height;
       codec_parameters->format = AV_PIX_FMT_PAL8;

       o_codec_ctx = avcodec_alloc_context3(codec);
       avcodec_parameters_to_context(o_codec_ctx, codec_parameters);

       o_codec_ctx->time_base = { 1, m_framerate };

       if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
           o_codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

       if ((_ret = avcodec_open2(o_codec_ctx, codec, NULL)) < 0)
           throw px::GIF::Error(std::string(av_make_error_string(_err_msg, FFMPEG_MSG_LEN, _ret)) + " open output codec.", "GIF::FFMPEG init_muxer");

       if ((_ret = avio_open(&ofmt_ctx->pb, m_filename.c_str(), AVIO_FLAG_WRITE)) < 0)
           throw px::GIF::Error(std::string(av_make_error_string(_err_msg, FFMPEG_MSG_LEN, _ret)) + " avio open error.", "GIF::FFMPEG init_muxer");

       if ((_ret = avformat_write_header(ofmt_ctx, NULL)) < 0)
           throw px::GIF::Error(std::string(av_make_error_string(_err_msg, FFMPEG_MSG_LEN, _ret)) + " write GIF header", "GIF::FFMPEG init_muxer");

    #if _DEBUG
       // This print the stream/output format.
       av_dump_format(ofmt_ctx, -1, m_filename.c_str(), 1);
    #endif
    }

    Add frame (usually in a loop)

    void px::GIF::FFMPEG::add_frame(pxImage * const img)
    {
       if (img->getImageType() != PXT_BYTE || img->getNChannels() != 4)
           throw px::GIF::Error("Failed to 'add_frame' since image is not PXT_BYTE and 4-channels.", "GIF::FFMPEG add_frame");

       if (img->getWidth() != m_width || img->getHeight() != m_height)
           throw px::GIF::Error("Failed to 'add_frame' since the size is not same to other inputs.", "GIF::FFMPEG add_frame");

       const int pitch = picture_rgb24->linesize[0];
       auto px_ptr = getImageAccessor(img);

       for (int y = 0; y < m_height; y++)
       {
           const int px_row = img->getOrigin() == ORIGIN_BOT_LEFT ? m_height - y - 1 : y;
           for (int x = 0; x < m_width; x++)
           {
               const int idx = y * pitch + 3 * x;
               picture_rgb24->data[0][idx] = px_ptr[px_row][x].ch[PX_RE];
               picture_rgb24->data[0][idx + 1] = px_ptr[px_row][x].ch[PX_GR];
               picture_rgb24->data[0][idx + 2] = px_ptr[px_row][x].ch[PX_BL];
           }
       }

       // palettegen needs a whole stream, so just add the frame to the buffer.
       if ((_ret = av_buffersrc_add_frame_flags(buffersrc_ctx, picture_rgb24, AV_BUFFERSRC_FLAG_KEEP_REF)) < 0)
           throw px::GIF::Error("Failed to 'add_frame' to global buffer with error: " +
                                std::string(av_make_error_string(_err_msg, FFMPEG_MSG_LEN, _ret)),
                                "GIF::FFMPEG add_frame");

       // Increment the PTS of the picture for the next addition to the buffer.
       picture_rgb24->pts += m_pts_increment;

       as_frame = true;
    }    

    Encoder (final step)

    void px::GIF::FFMPEG::encode()
    {
       if (!as_frame)
           throw px::GIF::Error("Please 'add_frame' before running the Encoding().", "GIF::FFMPEG encode");

       // end of buffer
       if ((_ret = av_buffersrc_add_frame_flags(buffersrc_ctx, nullptr, AV_BUFFERSRC_FLAG_KEEP_REF)) < 0)
           throw px::GIF::Error("error add frame to buffer source: " + std::string(av_make_error_string(_err_msg, FFMPEG_MSG_LEN, _ret)), "GIF::FFMPEG encode");

       do {
           AVFrame* filter_frame = av_frame_alloc();
           _ret = av_buffersink_get_frame(buffersink_ctx, filter_frame);
           if (_ret == AVERROR(EAGAIN) || _ret == AVERROR_EOF) {
               av_frame_free(&filter_frame); // free the frame itself, not just its buffers
               break;
           }

           // write the filtered frame to the output file
           muxing_one_frame(filter_frame);

           av_frame_free(&filter_frame);
       } while (_ret >= 0);

       av_write_trailer(ofmt_ctx);
    }

    void px::GIF::FFMPEG::muxing_one_frame(AVFrame* frame)
    {
       int ret = avcodec_send_frame(o_codec_ctx, frame);
       AVPacket *pkt = av_packet_alloc(); // av_packet_alloc() already initializes the packet

       while (ret >= 0) {
           ret = avcodec_receive_packet(o_codec_ctx, pkt);
           if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
               break;
           }

           av_write_frame(ofmt_ctx, pkt);
           av_packet_unref(pkt); // release each written packet's payload
       }
       av_packet_free(&pkt);
    }

    DTOR, Release and Cancel

    px::GIF::FFMPEG::~FFMPEG()
    {
       release();
    }


    void px::GIF::FFMPEG::release()
    {
       // Muxer stuffs
       if (ofmt_ctx != nullptr) avformat_free_context(ofmt_ctx);
       if (o_codec_ctx != nullptr) avcodec_close(o_codec_ctx);
       if (o_codec_ctx != nullptr) avcodec_free_context(&o_codec_ctx);

       ofmt_ctx = nullptr;
       o_codec_ctx = nullptr;

       // Filter stuffs
       if (buffersrc_ctx != nullptr) avfilter_free(buffersrc_ctx);
       if (buffersink_ctx != nullptr) avfilter_free(buffersink_ctx);
       if (filter_graph != nullptr) avfilter_graph_free(&filter_graph);

       buffersrc_ctx = nullptr;
       buffersink_ctx = nullptr;
       filter_graph = nullptr;

       // Conversion image.
       if (picture_rgb24 != nullptr) av_frame_free(&picture_rgb24);
       picture_rgb24 = nullptr;
    }

    void px::GIF::FFMPEG::cancel()
    {
       // In case of failure, we must close ffmpeg properly and exit.
       av_write_trailer(ofmt_ctx);

       // Release and close all elements.
       release();

       // Delete the file on disk.
       if (remove(m_filename.c_str()) != 0)
           PX_LOG0(PX_LOGLEVEL_ERROR, "GIF::FFMPEG - On 'cancel' failed to remove the file.");
    }