Advanced search

Media (91)

Other articles (14)

  • General document management

    13 May 2011, by

    MédiaSPIP never modifies the original document that is uploaded.
    For each uploaded document it performs two successive operations: creating an additional version that can easily be viewed online, while keeping the original downloadable in case the original document cannot be read in a web browser; and retrieving the original document's metadata in order to describe the file textually.
    The tables below explain what MédiaSPIP can do (...)
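
    As an aside, the second operation (reading the original file's metadata) can be illustrated with libavformat. This is only an illustrative sketch, not MédiaSPIP's actual implementation, which this excerpt does not show:

     extern "C" {
     #include <libavformat/avformat.h>   // on FFmpeg < 4.0, call av_register_all() first
     }
     #include <cstdio>

     // Print every metadata tag (title, artist, ...) found in a media file.
     int main(int argc, char **argv) {
         if (argc < 2) return 1;
         AVFormatContext *fmt = NULL;
         if (avformat_open_input(&fmt, argv[1], NULL, NULL) != 0)
             return 1;                                  // could not open the file
         const AVDictionaryEntry *tag = NULL;
         while ((tag = av_dict_get(fmt->metadata, "", tag, AV_DICT_IGNORE_SUFFIX)))
             std::printf("%s=%s\n", tag->key, tag->value);
         avformat_close_input(&fmt);
         return 0;
     }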

  • Videos

    21 April 2011, by

    Like "audio" documents, Mediaspip displays videos whenever possible using the HTML5 <video> tag.
    One drawback of this tag is that it is not recognised correctly by some browsers (Internet Explorer, to name one) and that each browser natively handles only certain video formats.
    Its main advantage, on the other hand, is native video support in browsers, which makes it possible to do without Flash and (...)

  • Accepted formats

    28 January 2010, by

    The following commands give information about the formats and codecs supported by the local ffmpeg installation:
    ffmpeg -codecs
    ffmpeg -formats
    Accepted input video formats
    This list is not exhaustive; it highlights the main formats in use: h264: H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10; m4v: raw MPEG-4 video format; flv: Flash Video (FLV) / Sorenson Spark / Sorenson H.263; Theora; wmv:
    Possible output video formats
    To begin with, we (...)
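
    For reference, the same information can be queried programmatically. A minimal sketch, assuming FFmpeg 4.0 or newer for av_codec_iterate(), roughly equivalent to ffmpeg -codecs:

     extern "C" {
     #include <libavcodec/avcodec.h>
     }
     #include <cstdio>

     // List every decoder/encoder the local libavcodec build knows about.
     int main() {
         void *iter = NULL;
         const AVCodec *c;
         while ((c = av_codec_iterate(&iter)) != NULL) {
             std::printf("%c%c %-20s %s\n",
                         av_codec_is_decoder(c) ? 'D' : '.',
                         av_codec_is_encoder(c) ? 'E' : '.',
                         c->name, c->long_name ? c->long_name : "");
         }
         return 0;
     }

    On older FFmpeg builds, the deprecated av_codec_next() loop plays the same role.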

On other sites (2190)

  • FFmpeg H264 parsing

    20 May 2015, by Yoohoo

    I am using FFmpeg to decode H.264 video in the following program:

    #include <stdio.h>    /* printf */
    #include <stdlib.h>   /* malloc, exit */
    #include <string.h>   /* memcpy */
    #include <stdint.h>   /* uint64_t */

    #include <sys/time.h>
    #include <signal.h>

    #include <libavcodec/avcodec.h>
    #include <libavutil/mathematics.h>

    #include <SDL/SDL.h>

    void sigint_handler(int signal) {
       printf("\n");
       exit(0);
    }

    const char *window_title;
    SDL_Surface *screen;
    SDL_Overlay *yuv_overlay;

    #define INBUF_SIZE 80000

    /*
    * Video decoding example
    */

    static long get_time_diff(struct timeval time_now) {
      struct timeval time_now2;
      gettimeofday(&time_now2,0);
      return time_now2.tv_sec*1.e6 - time_now.tv_sec*1.e6 + time_now2.tv_usec - time_now.tv_usec;
    }

    int video_open(AVCodecContext *avctx, const char *filename){
       int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
       int w,h;

       flags |= SDL_RESIZABLE;

       if (avctx->width){
           w = avctx->width;
           h = avctx->height;
       } else {
           w = 640;
           h = 480;
       }

       if(SDL_Init(SDL_INIT_VIDEO) < 0) {
       fprintf(stderr, "SDL_INIT_VIDEO failed!\n");
       exit(1);
       }

       screen = SDL_SetVideoMode(w, h, 0, flags);

       if (!screen) {
           fprintf(stderr, "SDL: could not set video mode - exiting\n");
           return -1;
       }
       if (!window_title)
           window_title = filename;
       SDL_WM_SetCaption(window_title, window_title);

       yuv_overlay = SDL_CreateYUVOverlay(w, h, SDL_YV12_OVERLAY, screen);

       if (yuv_overlay->hw_overlay) {
       fprintf(stderr, "Using hardware overlay!\n");
       }

       return 0;
    }

    int main(int argc, char **argv) {
       AVCodec *codec;
       AVCodecContext *c= NULL;
       AVCodecParserContext *parser = NULL;
       int frame, got_picture, len2, len;
       const char *filename;
       FILE *f;
       AVFrame *picture;
       char *arghwtf = malloc(INBUF_SIZE);
       char *luma = NULL;
       char *chroma = NULL;
       int i=0;
       uint64_t in_len;
       int pts, dts;
       struct timeval t;
       float inv_fps = 1e6/23.98;
       AVPacket avpkt;
       SDL_Rect rect;


       /* register all the codecs */
       avcodec_register_all();

       filename = argv[1];

       av_init_packet(&avpkt);

       printf("Decoding file %s...\n", filename);

       /* find the H.264 video decoder */
       codec = avcodec_find_decoder(CODEC_ID_H264);
       if (!codec) {
           fprintf(stderr, "codec not found\n");
           exit(1);
       }

       c = avcodec_alloc_context3(codec);
       picture = avcodec_alloc_frame();

       c->skip_loop_filter = 48; // skiploopfilter=all

       if (avcodec_open(c, codec) < 0) {
           fprintf(stderr, "could not open codec\n");
           exit(1);
       }

       /* the codec gives us the frame size, in samples */
       parser = av_parser_init(c->codec_id);
       parser->flags |= PARSER_FLAG_ONCE;

       f = fopen(filename, "rb");
       if (!f) {
           fprintf(stderr, "could not open %s\n", filename);
           exit(1);
       }

       frame = 0;
       gettimeofday(&t, 0);
       if(fread(arghwtf, 1, INBUF_SIZE, f) == 0) {
       exit(1);
       }
       in_len = 80000;
           while (in_len > 0 && !feof(f)) {
           len = av_parser_parse2(parser, c, &avpkt.data, &avpkt.size, arghwtf, in_len,
                                      pts, dts, AV_NOPTS_VALUE);

               len2 = avcodec_decode_video2(c, picture, &got_picture, &avpkt);
               if (len2 < 0) {
                   fprintf(stderr, "Error while decoding frame %d\n", frame);
                   exit(1);
               }
               if (got_picture) {
           if(!screen) {
               video_open(c, filename);

               rect.x = 0;
               rect.y = 0;
               rect.w = c->width;
               rect.h = c->height;
               inv_fps = av_q2d(c->time_base);
               fprintf(stderr, "w:%i h:%i\n", rect.w, rect.h);

               luma = malloc(c->width*c->height);
               chroma = malloc(c->width*c->height/4);

               SDL_DisplayYUVOverlay(yuv_overlay, &rect);

               signal(SIGINT, sigint_handler);
           }
                   fprintf(stderr, "\rDisplaying %c:frame %3d (%02d:%02d)...", av_get_pict_type_char(picture->pict_type), frame, frame/1440, (frame/24)%60);
                   fflush(stderr);

           SDL_LockYUVOverlay(yuv_overlay);

                   for(i=0;i<c->height;i++) {
                     memcpy(luma + i * c->width, picture->data[0] + i * picture->linesize[0], c->width);
                   }
           memcpy(yuv_overlay->pixels[0], luma, c->width * c->height);
                   for(i=0;i<c->height/2;i++) {
                     memcpy(chroma + i * c->width/2, picture->data[2] + i * picture->linesize[2], c->width/2);
                   }
           memcpy(yuv_overlay->pixels[1], chroma, c->width * c->height / 4);
                   for(i=0;i<c->height/2;i++) {
                     memcpy(chroma + i * c->width/2, picture->data[1] + i * picture->linesize[1], c->width/2);
                   }
           memcpy(yuv_overlay->pixels[2], chroma, c->width * c->height / 4);

           SDL_UnlockYUVOverlay(yuv_overlay);
           SDL_DisplayYUVOverlay(yuv_overlay, &rect);

           while(get_time_diff(t) < inv_fps) {
               sleep(1000);
           }
                   frame++;
           gettimeofday(&t, 0);
               }
           memcpy(arghwtf, arghwtf + len, 80000-len);
           fread(arghwtf + 80000 - len, 1, len, f);
           }

       /* some codecs, such as MPEG, transmit the I and P frame with a
          latency of one frame. You must do the following to have a
          chance to get the last frame of the video */
       avpkt.data = NULL;
       avpkt.size = 0;
       len = avcodec_decode_video2(c, picture, &got_picture, &avpkt);
       if (got_picture) {
           printf("saving last frame %3d\n", frame);
           fflush(stdout);

       /* Display last frame here, same code as in the decoding loop above. */

           frame++;
       }

       fclose(f);

       avcodec_close(c);
       av_free(c);
       av_free(picture);
       printf("\n");
    }

    It gives these errors:

    ||=== H264_decoding, Debug ===|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp||In function ‘int main(int, char**)’:|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|107|error: invalid conversion from ‘void*’ to ‘char*’ [-fpermissive]|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|136|warning: ‘AVFrame* avcodec_alloc_frame()’ is deprecated (declared at /home/yoohoo/ffmpeg_build/include/libavcodec/avcodec.h:3629) [-Wdeprecated-declarations]|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|136|warning: ‘AVFrame* avcodec_alloc_frame()’ is deprecated (declared at /home/yoohoo/ffmpeg_build/include/libavcodec/avcodec.h:3629) [-Wdeprecated-declarations]|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|138|error: invalid conversion from ‘int’ to ‘AVDiscard’ [-fpermissive]|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|140|error: ‘avcodec_open’ was not declared in this scope|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|163|error: invalid conversion from ‘char*’ to ‘const uint8_t* {aka const unsigned char*}’ [-fpermissive]|
    /home/yoohoo/ffmpeg_build/include/libavcodec/avcodec.h|4465|error:   initializing argument 5 of ‘int av_parser_parse2(AVCodecParserContext*, AVCodecContext*, uint8_t**, int*, const uint8_t*, int, int64_t, int64_t, int64_t)’ [-fpermissive]|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|181|error: invalid conversion from ‘void*’ to ‘char*’ [-fpermissive]|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|182|error: invalid conversion from ‘void*’ to ‘char*’ [-fpermissive]|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|188|error: ‘av_get_pict_type_char’ was not declared in this scope|
    /home/yoohoo/codeblocks/Real-time video streaming/H264_decoding/main.cpp|210|error: ‘sleep’ was not declared in this scope|
    ||=== Build finished: 9 errors, 2 warnings ===|

    I suspect this is an incompatibility problem caused by moving this C program to C++, but I ultimately need to use it in C++. How can I solve these invalid conversion problems? Thanks in advance!
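
    For reference, a minimal sketch of the kind of changes those errors ask for, assuming a libavcodec of roughly the era shown in the build log (the exact names depend on the installed FFmpeg version). It illustrates the C++-side adjustments only and is not a drop-in replacement for the program above:

     extern "C" {                          // FFmpeg headers are plain C; wrap them for C++
     #include <libavcodec/avcodec.h>
     }
     #include <cstdlib>
     #include <cstdint>

     static AVCodecContext *open_h264_decoder() {
         avcodec_register_all();
         AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_H264); // CODEC_ID_H264 on older headers
         AVCodecContext *c = avcodec_alloc_context3(codec);
         c->skip_loop_filter = AVDISCARD_ALL;       // enum value instead of the bare int 48
         if (avcodec_open2(c, codec, NULL) < 0) {   // avcodec_open() no longer exists
             av_free(c);
             return NULL;
         }
         return c;
     }

     int main() {
         AVCodecContext *c = open_h264_decoder();
         if (!c) return 1;
         AVFrame *picture = av_frame_alloc();       // replaces deprecated avcodec_alloc_frame()
         // malloc() returns void*, which C++ will not convert implicitly:
         uint8_t *inbuf = (uint8_t *) std::malloc(80000);
         // av_parser_parse2() expects const uint8_t*, so keep the buffer as uint8_t*
         // (or cast it) rather than char*; av_get_pict_type_char() became
         // av_get_picture_type_char() in libavutil; sleep()/usleep() need <unistd.h>.
         std::free(inbuf);
         av_frame_free(&picture);
         avcodec_close(c);
         av_free(c);
         return 0;
     }

    Each change maps to one of the errors in the build log; none of them alters the decoding logic itself.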

  • Cannot save animation in matplotlib : Windows permission denied

    13 May 2015, by Vladimir

    I've been trying all day long to sort this out, checking similar threads but with no success.
    Stretch's Cannot save matplotlib animation with ffmpeg helped with earlier errors (I had the ffmpeg path wrong), but I kept getting Access denied after fixing it.

    My ffmpeg binary is on C:\ffmpeg\bin

    A nice alternative would be to be able to export gif files, but I keep getting an ascii error with imagemagick. I think both problems are related, so I wanted to sort out the ffmpeg one first.

    I think the problem might have to do with the fact that I'm working with Canopy (on Windows 8 64-bit), which pretty much took over my path variable and broke some things along the way (e.g. I can't open IDLE since I installed Canopy; I haven't tried to fix that yet). As I fixed things along the way I found at least 3 distinct path variables, all of which I updated: the Windows advanced settings path (set manually), the Windows console path (set via console with setx), and sys.path (set or checked at runtime), adding ";C:\ffmpeg\bin", which is where ffmpeg actually is. Whether or not I sort out the problem, I would like to learn which of these environment variables is relevant for what; I find it very confusing.

    The code is the following:

    # -*- coding: utf-8 -*-
    import sys
    import numpy as np
    from matplotlib import pyplot as plt
    from matplotlib import animation
    plt.rcParams['animation.ffmpeg_path'] = r'C:\ffmpeg\bin'
    if r'C:\ffmpeg\bin' not in sys.path: sys.path.append(r'C:\ffmpeg\bin')

    fig = plt.figure()
    ax = plt.axes(xlim=(0, 2), ylim=(-2, 2))
    line, = ax.plot([], [], lw=2)

    def init():
       line.set_data([], [])
       return line,

    def animate(i):
       x = np.linspace(0, 2, 1000)
       y = np.sin(2 * np.pi * (x - 0.01 * i))
       line.set_data(x, y)
       return line,

    anim = animation.FuncAnimation(fig, animate, init_func=init, frames=200, interval=20, blit=True)
    plt.show()

    # This case generates Windows err: Access Denied
    FFwriter = animation.FFMpegWriter()
    # anim.save(r'C:\basic_animation.mp4', writer = FFwriter, fps=30)

    # This case generates UnicodeDecodeError:'ascii' codec can't decode byte 0xa0 in position 3
    # anim.save(r'C:\animation.gif', writer='imagemagick', fps=30)

    The traceback for anim.save(r'C:\basic_animation.mp4', writer = FFwriter, fps=30):

    %run "C:\Users\Yahveh\Documents\Vlad\Investigacion\animation saving.py"
    ---------------------------------------------------------------------------
    WindowsError                              Traceback (most recent call last)
    C:\Users\Yahveh\Documents\Vlad\Investigacion\animation saving.py in <module>()
        27 # This case generates Windows err: Access Denied
        28 FFwriter = animation.FFMpegWriter()
    ---> 29 anim.save(r'C:\basic_animation.mp4', writer = FFwriter, fps=30)
        30
        31 # This case generates UnicodeDecodeError:'ascii' codec can't decode byte 0xa0 in position 3

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\User\lib\site-packages\matplotlib\animation.pyc in save(self, filename, writer, fps, dpi, codec, bitrate, extra_args, metadata, extra_anim, savefig_kwargs)
       759         # since GUI widgets are gone. Either need to remove extra code to
       760         # allow for this non-existant use case or find a way to make it work.
    --> 761         with writer.saving(self._fig, filename, dpi):
       762             for data in zip(*[a.new_saved_frame_seq()
       763                               for a in all_anim]):

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\App\appdata\canopy-1.5.2.2785.win-x86_64\lib\contextlib.pyc in __enter__(self)
        15     def __enter__(self):
        16         try:
    ---> 17             return self.gen.next()
        18         except StopIteration:
        19             raise RuntimeError("generator didn't yield")

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\User\lib\site-packages\matplotlib\animation.pyc in saving(self, *args)
       184         '''
       185         # This particular sequence is what contextlib.contextmanager wants
    --> 186         self.setup(*args)
       187         yield
       188         self.finish()

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\User\lib\site-packages\matplotlib\animation.pyc in setup(self, fig, outfile, dpi, *args)
       174         # Run here so that grab_frame() can write the data to a pipe. This
       175         # eliminates the need for temp files.
    --> 176         self._run()
       177
       178     @contextlib.contextmanager

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\User\lib\site-packages\matplotlib\animation.pyc in _run(self)
       202                                       stdout=output, stderr=output,
       203                                       stdin=subprocess.PIPE,
    --> 204                                       creationflags=subprocess_creation_flags)
       205
       206     def finish(self):

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\App\appdata\canopy-1.5.2.2785.win-x86_64\lib\subprocess.pyc in __init__(self, args, bufsize, executable, stdin, stdout, stderr, preexec_fn, close_fds, shell, cwd, env, universal_newlines, startupinfo, creationflags)
       707                                 p2cread, p2cwrite,
       708                                 c2pread, c2pwrite,
    --> 709                                 errread, errwrite)
       710         except Exception:
       711             # Preserve original exception in case os.close raises.

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\App\appdata\canopy-1.5.2.2785.win-x86_64\lib\subprocess.pyc in _execute_child(self, args, executable, preexec_fn, close_fds, cwd, env, universal_newlines, startupinfo, creationflags, shell, to_close, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
       955                                          env,
       956                                          cwd,
    --> 957                                          startupinfo)
       958             except pywintypes.error, e:
       959                 # Translate pywintypes.error to WindowsError, which is

    WindowsError: [Error 5] Acceso denegado

    The traceback for anim.save(r'C:\animation.gif', writer='imagemagick', fps=30):

    In [8]: %run "C:\Users\Yahveh\Documents\Vlad\Investigacion\animation saving.py"
    ---------------------------------------------------------------------------
    UnicodeDecodeError                        Traceback (most recent call last)
    C:\Users\Yahveh\Documents\Vlad\Investigacion\animation saving.py in <module>()
        30
        31 # This case generates UnicodeDecodeError:'ascii' codec can't decode byte 0xa0 in position 3
    ---> 32 anim.save(r'C:\animation.gif', writer='imagemagick', fps=30)

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\User\lib\site-packages\matplotlib\animation.pyc in save(self, filename, writer, fps, dpi, codec, bitrate, extra_args, metadata, extra_anim, savefig_kwargs)
       765                     # TODO: Need to see if turning off blit is really necessary
       766                     anim._draw_next_frame(d, blit=False)
    --> 767                 writer.grab_frame(**savefig_kwargs)
       768
       769         # Reconnect signal for first draw if necessary

    C:\Users\Yahveh\AppData\Local\Enthought\Canopy\User\lib\site-packages\matplotlib\animation.pyc in grab_frame(self, **savefig_kwargs)
       225             verbose.report('MovieWriter -- Error '
       226                            'running proc:\n%s\n%s' % (out,
    --> 227                                                       err), level='helpful')
       228             raise
       229

    UnicodeDecodeError: 'ascii' codec can't decode byte 0xa0 in position 3: ordinal not in range(128)

    Stared at them for a while.

    Thanks for your time!

    UPDATE: I followed the steps in this post for granting access to both C:\ffmpeg and the destination folder, but no luck :(

  • Video too fast FFmpeg

    22 November 2012, by Spamdark

    I am having an issue with ffmpeg again. I'm a newbie with ffmpeg, and I can't find a good, up-to-date tutorial...

    This time, when I play a video with ffmpeg, it plays too fast; ffmpeg is ignoring the FPS. I don't want to handle that with a fixed thread sleep, because the videos have different FPS values.

    I created a thread; here is its loop:

    AVPacket framepacket;

    while(av_read_frame(formatContext,&framepacket)>= 0){
       pausecontrol.lock();

       // Is it a video or an audio frame?
       if(framepacket.stream_index==gotVideoCodec){
           int framereaded;
           // Video? Ok
           avcodec_decode_video2(videoCodecContext,videoFrame,&framereaded,&framepacket);
           // Yeah, did we get it?
           if(framereaded && doit){
               AVRational millisecondbase = {1,1000};
               int f_number = framepacket.dts;
               int f_time = av_rescale_q(framepacket.dts,formatContext->streams[gotVideoCodec]->time_base,millisecondbase);
               currentTime=f_time;
               currentFrameNumber=f_number;

               int stWidth = videoCodecContext->width;
               int stHeight = videoCodecContext->height;
               SwsContext *ctx = sws_getContext(stWidth, stHeight, videoCodecContext->pix_fmt, stWidth,
               stHeight, PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
               if(ctx!=0){
               sws_scale(ctx,videoFrame->data,videoFrame->linesize,0,videoCodecContext->height,videoFrameRGB->data,videoFrameRGB->linesize);
               QImage framecapsule=QImage(stWidth,stHeight,QImage::Format_RGB888);

               for(int y=0;y<stHeight;y++){
                   // copy one RGB888 row into the QImage (QImage::scanLine(y) assumed as the destination)
                   memcpy(framecapsule.scanLine(y),videoFrameRGB->data[0]+y*videoFrameRGB->linesize[0],stWidth*3);
               }
               emit newFrameReady(framecapsule);
               sws_freeContext(ctx);
               }

           }
       }
       if(framepacket.stream_index==gotAudioCodec){
           // Audio? Ok
       }
       pausecontrol.unlock();
       av_free_packet(&framepacket);
    }

    Any idea?
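
    For reference, one common approach (not the poster's code) is to pace each frame against its own timestamp instead of a fixed sleep. A minimal sketch, where playback_start and first_pts_ms are hypothetical bookkeeping values recorded when the loop starts, and f_time_ms is the millisecond timestamp already computed above with av_rescale_q:

     #include <chrono>
     #include <thread>
     #include <cstdint>

     using Clock = std::chrono::steady_clock;

     // Block until the wall clock reaches the moment this frame is due for display.
     void wait_until_presentation(Clock::time_point playback_start,
                                  int64_t first_pts_ms, int64_t f_time_ms) {
         auto target = playback_start +
                       std::chrono::milliseconds(f_time_ms - first_pts_ms);
         if (Clock::now() < target)
             std::this_thread::sleep_until(target);   // sleep only the remaining gap
     }

    Called just before emit newFrameReady(framecapsule), this ties the delay to the stream's own time_base, so files with different FPS values play back at their intended speed without any per-file tuning.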