Newest 'ffmpeg' Questions - Stack Overflow

http://stackoverflow.com/questions/tagged/ffmpeg

Articles published on the site

  • How to improve web camera streaming latency to v4l2loopback device with ffmpeg?

    11 March, by Made by Moses

    I'm trying to stream my iPhone camera to my PC on LAN.

    What I've done:

    1. HTTP server with an HTML page and a streaming script:

      I use WebSockets here; maybe WebRTC would be a better choice, but the network latency seems good enough.

    async function beginCameraStream() {
      const mediaStream = await navigator.mediaDevices.getUserMedia({
        video: { facingMode: "user" },
      });
    
      websocket = new WebSocket(SERVER_URL);
    
      websocket.onopen = () => {
        console.log("WS connected");
    
        const options = { mimeType: "video/mp4", videoBitsPerSecond: 1_000_000 };
        mediaRecorder = new MediaRecorder(mediaStream, options);
    
        mediaRecorder.ondataavailable = async (event) => {
          // to measure latency I prepend timestamp to the actual video bytes chunk
          const timestamp = Date.now();
          const timestampBuffer = new ArrayBuffer(8);
          const dataView = new DataView(timestampBuffer);
          dataView.setBigUint64(0, BigInt(timestamp), true);
          const data = await event.data.bytes();
    
          const result = new Uint8Array(data.byteLength + 8);
          result.set(new Uint8Array(timestampBuffer), 0);
          result.set(data, 8);
    
          websocket.send(result);
        };
    
        mediaRecorder.start(100); // Collect 100ms chunks
      };
    }
    
    2. Server to process video chunks

    import { serve } from "bun";
    import { spawn } from "child_process"; // spawn() is used in setupFFmpeg below
    import { Readable } from "stream";
    
    const V4L2LOOPBACK_DEVICE = "/dev/video10";
    
    export const setupFFmpeg = (v4l2device) => {
      // prettier-ignore
      return spawn("ffmpeg", [
        '-i', 'pipe:0',           // Read from stdin
        '-pix_fmt', 'yuv420p',    // Pixel format
        '-r', '30',               // Target 30 fps
        '-f', 'v4l2',             // Output format
        v4l2device, // Output to v4l2loopback device
      ]);
    };
    
    export class FfmpegStream extends Readable {
      _read() {
        // This is called when the stream wants more data
        // We push data when we get chunks
      }
    }
    
    function main() {
      const ffmpeg = setupFFmpeg(V4L2LOOPBACK_DEVICE);
      serve({
        port: 8000,
        fetch(req, server) {
          if (server.upgrade(req)) {
            return; // Upgraded to WebSocket
          }
        },
        websocket: {
          open(ws) {
            console.log("Client connected");
            const stream = new FfmpegStream();
            stream.pipe(ffmpeg?.stdin);
    
            ws.data = {
              stream,
              received: 0,
            };
          },
          async message(ws, message) {
            const view = new DataView(message.buffer, 0, 8);
            const ts = Number(view.getBigUint64(0, true));
            ws.data.received += message.byteLength;
            const chunk = new Uint8Array(message.buffer, 8, message.byteLength - 8);
    
            ws.data.stream.push(chunk);
    
            console.log(
              [
                `latency: ${Date.now() - ts} ms`,
                `chunk: ${message.byteLength}`,
                `total: ${ws.data.received}`,
              ].join(" | "),
            );
          },
        },
      });
    }
    
    main();
    

    After that, I open the v4l2loopback device:

    cvlc v4l2:///dev/video10
    

    the picture is delayed by at least 1.5 seconds, which is unacceptable for my project.

    Thoughts:

    • The problem doesn't seem to be network latency:
    latency: 140 ms | chunk: 661 Bytes | total: 661 Bytes
    latency: 206 ms | chunk: 16.76 KB | total: 17.41 KB
    latency: 141 ms | chunk: 11.28 KB | total: 28.68 KB
    latency: 141 ms | chunk: 13.05 KB | total: 41.74 KB
    latency: 199 ms | chunk: 11.39 KB | total: 53.13 KB
    latency: 141 ms | chunk: 16.94 KB | total: 70.07 KB
    latency: 139 ms | chunk: 12.67 KB | total: 82.74 KB
    latency: 142 ms | chunk: 13.14 KB | total: 95.88 KB
    

    ~150 ms is actually a lot for 15 KB on a LAN, but there may be some issue with my router.

    • As far as I can tell, it isn't tied to ffmpeg throughput either:
    Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'pipe:0':
      Metadata:
        major_brand     : iso5
        minor_version   : 1
        compatible_brands: isomiso5hlsf
        creation_time   : 2025-03-09T17:16:49.000000Z
      Duration: 00:00:01.38, start: 0.000000, bitrate: N/A
        Stream #0:0(und): Video: h264 (Baseline) (avc1 / 0x31637661), yuvj420p(pc), 1280x720, 4012 kb/s, 57.14 fps, 29.83 tbr, 600 tbn, 1200 tbc (default)
        Metadata:
          rotate          : 90
          creation_time   : 2025-03-09T17:16:49.000000Z
          handler_name    : Core Media Video
        Side data:
          displaymatrix: rotation of -90.00 degrees
    
    Stream mapping:
      Stream #0:0 -> #0:0 (h264 (native) -> rawvideo (native))
    
    [swscaler @ 0x55d8d0b83100] deprecated pixel format used, make sure you did set range correctly
    
    Output #0, video4linux2,v4l2, to '/dev/video10':
      Metadata:
        major_brand     : iso5
        minor_version   : 1
        compatible_brands: isomiso5hlsf
        encoder         : Lavf58.45.100
    
    Stream #0:0(und): Video: rawvideo (I420 / 0x30323449), yuv420p, 720x1280, q=2-31, 663552 kb/s, 60 fps, 60 tbn, 60 tbc (default)
        Metadata:
          encoder         : Lavc58.91.100 rawvideo
          creation_time   : 2025-03-09T17:16:49.000000Z
          handler_name    : Core Media Video
        Side data:
          displaymatrix: rotation of -0.00 degrees
    
    frame=   99 fps=0.0 q=-0.0 size=N/A time=00:00:01.65 bitrate=N/A dup=50 drop=0 speed=2.77x
    frame=  137 fps=114 q=-0.0 size=N/A time=00:00:02.28 bitrate=N/A dup=69 drop=0 speed=1.89x
    frame=  173 fps= 98 q=-0.0 size=N/A time=00:00:02.88 bitrate=N/A dup=87 drop=0 speed=1.63x
    frame=  210 fps= 86 q=-0.0 size=N/A time=00:00:03.50 bitrate=N/A dup=105 drop=0 speed=1.44x
    frame=  249 fps= 81 q=-0.0 size=N/A time=00:00:04.15 bitrate=N/A dup=125 drop=0 speed=1.36
    frame=  279 fps= 78 q=-0.0 size=N/A time=00:00:04.65 bitrate=N/A dup=139 drop=0 speed=1.31x
    
    • I also tried writing the video stream directly to a video.mp4 file and immediately opening it with VLC, but it can only be opened successfully after ~1.5 seconds.

    • I've also tried using an OBS v4l2 input source instead of VLC, but the latency is the same.

    Update №1

    When I stream an actual .mp4 file to ffmpeg, it works almost immediately, with only a ~0.2 s delay to spin up ffmpeg itself:

    cat video.mp4 | ffmpeg -re -i pipe:0 -pix_fmt yuv420p -f v4l2 /dev/video10 & ; sleep 0.2 && cvlc v4l2:///dev/video10
    

    So the problem apparently lies in the streaming process itself.
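
    One avenue worth trying (an assumption, not something verified in the question): since a complete .mp4 starts almost instantly, the extra delay may come from ffmpeg probing and buffering the fragmented input before it emits the first frame. The hypothetical variant of setupFFmpeg below only adds standard ffmpeg input flags that shorten that probing; it is a sketch, not a confirmed fix:

    const setupLowLatencyFFmpeg = (v4l2device) => {
      // Hypothetical sketch: same pipeline as setupFFmpeg above, plus standard
      // ffmpeg input options that reduce probing/buffering before output starts.
      return spawn("ffmpeg", [
        '-probesize', '32',        // probe as little of the input as possible
        '-analyzeduration', '0',   // don't spend time analyzing the stream up front
        '-fflags', 'nobuffer',     // reduce demuxer input buffering
        '-flags', 'low_delay',     // request low-delay decoding
        '-i', 'pipe:0',            // read from stdin, as before
        '-pix_fmt', 'yuv420p',
        '-r', '30',
        '-f', 'v4l2',
        v4l2device,
      ]);
    };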

  • Is it possible to redirect the messages libffmpeg prints to stderr?

    11 March, by wallefan

    I'm writing a program that uses libffmpeg to do some transcoding internally, but I find it has a tendency to print a lot of things to the process's stderr. I want to redirect those messages somewhere more intelligent.

    To be clear, I am NOT talking about spawning the ffmpeg executable as a subprocess and redirecting its stdio! That is trivial in any programming language. I'm using ffmpeg as a C library (well, Rust bindings around a C library in my case) and I'm wondering if libffmpeg provides a hook for writing to stderr that I can override.

    If no such hook exists, would my best bet be to fork a child process with stdio redirection and only use libffmpeg from that subprocess? That feels really hacky to me, especially since it would be really nice to not have to send the results libffmpeg spits out across a process boundary.
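
    For what it's worth, libavutil does expose such a hook: av_log_set_callback() replaces the default stderr writer for everything logged through av_log(). A minimal sketch (the destination here is stdout purely as a placeholder, and the callback name is made up):

    #include <stdarg.h>
    #include <stdio.h>
    #include <libavutil/log.h>

    // Custom sink for libav* log messages; replaces the default stderr writer.
    static void my_log_callback(void *avcl, int level, const char *fmt, va_list vl)
    {
        char line[1024];
        static int print_prefix = 1;  // note: not thread-safe as written

        if (level > av_log_get_level())
            return;  // respect the configured verbosity

        // Render the message the way the default callback would,
        // including the "[h264 @ 0x...]" style prefix.
        av_log_format_line(avcl, level, fmt, vl, line, sizeof(line), &print_prefix);
        fputs(line, stdout);  // or hand `line` to any logger you like
    }

    void install_log_hook(void)
    {
        av_log_set_callback(my_log_callback);
    }

    The same C symbol should be reachable from Rust bindings generated over libavutil/log.h, so a subprocess should not be necessary.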

  • av_log() function in ffmpeg to output to a file

    11 March, by TilakVarisetty

    I am logging timestamps and frames to the console using av_log(). Unfortunately, the console output does not pipe to a file.

    Is there a way to add a function or other option to log to a file directly, instead of the console?
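
    One way to do this (a sketch assuming a plain C program; the log path is an arbitrary placeholder) is to install a custom callback with av_log_set_callback() and vfprintf() straight into a FILE*:

    #include <stdarg.h>
    #include <stdio.h>
    #include <libavutil/log.h>

    static FILE *log_file;  // opened once, before any av_log() call

    // Forward every libav* log message into log_file instead of the console.
    static void file_log_callback(void *avcl, int level, const char *fmt, va_list vl)
    {
        (void)avcl;
        if (!log_file || level > av_log_get_level())
            return;
        vfprintf(log_file, fmt, vl);
        fflush(log_file);
    }

    int init_file_logging(const char *path)  // e.g. "av_log.txt" (placeholder)
    {
        log_file = fopen(path, "a");
        if (!log_file)
            return -1;
        av_log_set_callback(file_log_callback);
        return 0;
    }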

  • Linker error when trying an example program with ffmpeg

    11 March, by Chris

    I think this might be a stupid question and I'm just blind, but this has been driving me nuts for hours now. I downloaded ffmpeg and built it. Now I want to try it out in a program, but I can't set up CMake to link ffmpeg properly and have no idea what is wrong.

    The linker always tells me this:

    christoph@christoph-ThinkPad-T490:~/develop/ffmpg_example/build$ make
    [ 50%] Linking CXX executable test
    CMakeFiles/test.dir/main.cxx.o: In function `main':
    main.cxx:(.text+0x180): undefined reference to `av_register_all()'
    main.cxx:(.text+0x1a7): undefined reference to `avformat_open_input(AVFormatContext**, char const*, AVInputFormat*, AVDictionary**)'
    main.cxx:(.text+0x1ce): undefined reference to `avformat_find_stream_info(AVFormatContext*, AVDictionary**)'
    main.cxx:(.text+0x206): undefined reference to `av_dump_format(AVFormatContext*, int, char const*, int)'
    main.cxx:(.text+0x2bb): undefined reference to `avcodec_find_decoder(AVCodecID)'
    main.cxx:(.text+0x2fc): undefined reference to `avcodec_alloc_context3(AVCodec const*)'
    main.cxx:(.text+0x316): undefined reference to `avcodec_copy_context(AVCodecContext*, AVCodecContext const*)'
    main.cxx:(.text+0x361): undefined reference to `avcodec_open2(AVCodecContext*, AVCodec const*, AVDictionary**)'
    main.cxx:(.text+0x377): undefined reference to `av_frame_alloc()'
    main.cxx:(.text+0x383): undefined reference to `av_frame_alloc()'
    main.cxx:(.text+0x3ba): undefined reference to `avpicture_get_size(AVPixelFormat, int, int)'
    main.cxx:(.text+0x3d0): undefined reference to `av_malloc(unsigned long)'
    main.cxx:(.text+0x3ff): undefined reference to `avpicture_fill(AVPicture*, unsigned char const*, AVPixelFormat, int, int)'
    main.cxx:(.text+0x43d): undefined reference to `sws_getContext(int, int, AVPixelFormat, int, int, AVPixelFormat, int, SwsFilter*, SwsFilter*, double const*)'
    main.cxx:(.text+0x465): undefined reference to `av_read_frame(AVFormatContext*, AVPacket*)'
    main.cxx:(.text+0x49f): undefined reference to `avcodec_decode_video2(AVCodecContext*, AVFrame*, int*, AVPacket const*)'
    main.cxx:(.text+0x4fd): undefined reference to `sws_scale(SwsContext*, unsigned char const* const*, int const*, int, int, unsigned char* const*, int const*)'
    main.cxx:(.text+0x545): undefined reference to `av_free_packet(AVPacket*)'
    main.cxx:(.text+0x556): undefined reference to `av_free(void*)'
    main.cxx:(.text+0x565): undefined reference to `av_frame_free(AVFrame**)'
    main.cxx:(.text+0x574): undefined reference to `av_frame_free(AVFrame**)'
    main.cxx:(.text+0x580): undefined reference to `avcodec_close(AVCodecContext*)'
    main.cxx:(.text+0x58f): undefined reference to `avcodec_close(AVCodecContext*)'
    main.cxx:(.text+0x59e): undefined reference to `avformat_close_input(AVFormatContext**)'
    collect2: error: ld returned 1 exit status
    CMakeFiles/test.dir/build.make:87: recipe for target 'test' failed
    make[2]: *** [test] Error 1
    CMakeFiles/Makefile2:75: recipe for target 'CMakeFiles/test.dir/all' failed
    make[1]: *** [CMakeFiles/test.dir/all] Error 2
    Makefile:83: recipe for target 'all' failed
    make: *** [all] Error 2
    

    The CMakeLists.txt looks like this:

    cmake_minimum_required(VERSION 3.16)
    
    project(ffmpeg_test)
    
    add_library(avformat STATIC IMPORTED)
    set_target_properties(avformat
        PROPERTIES IMPORTED_LOCATION /home/christoph/develop/FFmpeg/build/lib/libavformat.a
    )
    add_library(avcodec STATIC IMPORTED)
    set_target_properties(avcodec
        PROPERTIES IMPORTED_LOCATION /home/christoph/develop/FFmpeg/build/lib/libavcodec.a
    )
    add_library(swscale STATIC IMPORTED)
    set_target_properties(swscale
        PROPERTIES IMPORTED_LOCATION /home/christoph/develop/FFmpeg/build/lib/libswscale.a
    )
    add_library(avutil STATIC IMPORTED)
    set_target_properties(avutil
        PROPERTIES IMPORTED_LOCATION /home/christoph/develop/FFmpeg/build/lib/libavutil.a
    )
    add_executable(test main.cxx)
    
    target_link_libraries(test PRIVATE
        /home/christoph/develop/FFmpeg/build/lib/libavformat.a
        avcodec
        swscale
        avutil
    )
    target_include_directories(test PRIVATE /home/christoph/develop/FFmpeg/build/include)
    

    And here are the ffmpeg libs:

    christoph@christoph-ThinkPad-T490:~/develop/FFmpeg/build/lib$ ll
    total 277840
    drwxr-xr-x  3 christoph christoph      4096 Dez  7 23:59 ./
    drwxr-xr-x 17 christoph christoph      4096 Dez  7 23:59 ../
    -rw-r--r--  1 christoph christoph 173479270 Dez  7 23:59 libavcodec.a
    -rw-r--r--  1 christoph christoph   2174910 Dez  7 23:59 libavdevice.a
    -rw-r--r--  1 christoph christoph  37992438 Dez  7 23:59 libavfilter.a
    -rw-r--r--  1 christoph christoph  59222040 Dez  7 23:59 libavformat.a
    -rw-r--r--  1 christoph christoph   4759514 Dez  7 23:59 libavutil.a
    -rw-r--r--  1 christoph christoph    695698 Dez  7 23:59 libswresample.a
    -rw-r--r--  1 christoph christoph   6164398 Dez  7 23:59 libswscale.a
    drwxr-xr-x  2 christoph christoph      4096 Dez  7 23:59 pkgconfig/
    

    And this is the example code:

    #include <stdio.h>
    
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    
    // compatibility with newer API
    #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1)
    #define av_frame_alloc avcodec_alloc_frame
    #define av_frame_free avcodec_free_frame
    #endif
    
    void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame) {
      FILE *pFile;
      char szFilename[32];
      int  y;
      
      // Open file
      sprintf(szFilename, "frame%d.ppm", iFrame);
      pFile=fopen(szFilename, "wb");
      if(pFile==NULL)
        return;
      
      // Write header
      fprintf(pFile, "P6\n%d %d\n255\n", width, height);
      
      // Write pixel data
      for(y=0; y<height; y++)
        fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, width*3, pFile);
      
      // Close file
      fclose(pFile);
    }
    
    int main(int argc, char *argv[]) {
      // Initalizing these to NULL prevents segfaults!
      AVFormatContext   *pFormatCtx = NULL;
      int               i, videoStream;
      AVCodecContext    *pCodecCtxOrig = NULL;
      AVCodecContext    *pCodecCtx = NULL;
      AVCodec           *pCodec = NULL;
      AVFrame           *pFrame = NULL;
      AVFrame           *pFrameRGB = NULL;
      AVPacket          packet;
      int               frameFinished;
      int               numBytes;
      uint8_t           *buffer = NULL;
      struct SwsContext *sws_ctx = NULL;
    
      if(argc < 2) {
        printf("Please provide a movie file\n");
        return -1;
      }
      // Register all formats and codecs
      av_register_all();
      
      // Open video file
      if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL)!=0)
        return -1; // Couldn't open file
      
      // Retrieve stream information
      if(avformat_find_stream_info(pFormatCtx, NULL)<0)
        return -1; // Couldn't find stream information
      
      // Dump information about file onto standard error
      av_dump_format(pFormatCtx, 0, argv[1], 0);
      
      // Find the first video stream
      videoStream=-1;
      for(i=0; i<pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
          videoStream=i;
          break;
        }
      if(videoStream==-1)
        return -1; // Didn't find a video stream
      
      // Get a pointer to the codec context for the video stream
      pCodecCtxOrig=pFormatCtx->streams[videoStream]->codec;
      // Find the decoder for the video stream
      pCodec=avcodec_find_decoder(pCodecCtxOrig->codec_id);
      if(pCodec==NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
      }
      // Copy context
      pCodecCtx = avcodec_alloc_context3(pCodec);
      if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
        fprintf(stderr, "Couldn't copy codec context");
        return -1; // Error copying codec context
      }
    
      // Open codec
      if(avcodec_open2(pCodecCtx, pCodec, NULL)<0)
        return -1; // Could not open codec
      
      // Allocate video frame
      pFrame=av_frame_alloc();
      
      // Allocate an AVFrame structure
      pFrameRGB=av_frame_alloc();
      if(pFrameRGB==NULL)
        return -1;
    
      // Determine required buffer size and allocate buffer
      numBytes=avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width,
                      pCodecCtx->height);
      buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
      
      // Assign appropriate parts of buffer to image planes in pFrameRGB
      // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
      // of AVPicture
      avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24,
             pCodecCtx->width, pCodecCtx->height);
      
      // initialize SWS context for software scaling
      sws_ctx = sws_getContext(pCodecCtx->width,
                   pCodecCtx->height,
                   pCodecCtx->pix_fmt,
                   pCodecCtx->width,
                   pCodecCtx->height,
                   AV_PIX_FMT_RGB24,
                   SWS_BILINEAR,
                   NULL,
                   NULL,
                   NULL
                   );
    
      // Read frames and save first five frames to disk
      i=0;
      while(av_read_frame(pFormatCtx, &packet)>=0) {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream) {
          // Decode video frame
          avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
          
          // Did we get a video frame?
          if(frameFinished) {
        // Convert the image from its native format to RGB
        sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
              pFrame->linesize, 0, pCodecCtx->height,
              pFrameRGB->data, pFrameRGB->linesize);
        
        // Save the frame to disk
        if(++i<=5)
          SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, 
                i);
          }
        }
        
        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
      }
      
      // Free the RGB image
      av_free(buffer);
      av_frame_free(&pFrameRGB);
      
      // Free the YUV frame
      av_frame_free(&pFrame);
      
      // Close the codecs
      avcodec_close(pCodecCtx);
      avcodec_close(pCodecCtxOrig);
    
      // Close the video file
      avformat_close_input(&pFormatCtx);
      
      return 0;
    }
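
    One common cause of exactly this error pattern (an observation, not confirmed for this setup): the undefined references carry full C++ parameter lists, which means the compiler mangled the FFmpeg function names as C++ symbols. FFmpeg's headers are plain C and do not carry extern "C" guards of their own, so when they are included from main.cxx they need to be wrapped, roughly like this:

    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    }

    Independently of that, static archives are order-sensitive with the GNU linker, so keeping avformat before avcodec, swscale and avutil in target_link_libraries (as the CMakeLists above already does) is the safe ordering.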
    
  • FFMPEG compile with x264 support for Android

    10 March, by Pecana

    I am trying to build ffmpeg extensions for media3 (ExoPlayer) for Android

    https://github.com/androidx/media/tree/release/libraries/decoder_ffmpeg

    Using the default settings everything is fine, but if I try to add support for libx264 the build fails. I compiled x264 for Android locally and it worked, so the .so file for arm64 is present. I added it to pkg-config with:

    export PKG_CONFIG_PATH=/build/x264/arm64/lib/pkgconfig:$PKG_CONFIG_PATH

    but when I try to build ffmpeg with the following command, it fails:

    ./configure \
    --prefix=/build/ffmpeg \
    --enable-gpl \
    --enable-libx264 \
    --enable-static \
    --enable-pic \
    --arch=arm64 \
    --target-os=android \
    --cross-prefix=$TOOLCHAIN/bin/aarch64-linux-android21- \
    --sysroot=$SYSROOT
    

    Error: ERROR: x264 not found using pkg-config

    But it does not appear to be due to pkg-config itself, as the command:

    pkg-config --cflags --libs x264
    

    reports: -DX264_API_IMPORTS -I/build/x264/arm64/include -L/build/x264/arm64/lib -lx264

    Any idea how to fix it?

    Thank you :-)
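
    A hedged sketch of next steps (assumptions, not verified against this build): when cross-compiling, FFmpeg's configure defaults to ${cross_prefix}pkg-config, so the pkg-config that finds x264 on the host may not be the one configure actually runs. Pointing configure at it explicitly, or passing the x264 paths directly, and then reading the failing check in ffbuild/config.log, are common ways to narrow this down:

    # Hypothetical variant of the configure call above; paths reuse the question's.
    export PKG_CONFIG_PATH=/build/x264/arm64/lib/pkgconfig:$PKG_CONFIG_PATH
    ./configure \
      --prefix=/build/ffmpeg \
      --enable-gpl \
      --enable-libx264 \
      --enable-static \
      --enable-pic \
      --arch=arm64 \
      --target-os=android \
      --cross-prefix=$TOOLCHAIN/bin/aarch64-linux-android21- \
      --sysroot=$SYSROOT \
      --pkg-config=pkg-config \
      --extra-cflags="-I/build/x264/arm64/include" \
      --extra-ldflags="-L/build/x264/arm64/lib"
    # If configure still rejects x264, ffbuild/config.log records the exact
    # compile/link test that failed.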