Recherche avancée

Médias (2)

Mot : - Tags -/kml

Autres articles (15)

  • Creating farms of unique websites

    13 avril 2011, par

    MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
    This allows (among other things) : implementation costs to be shared between several different projects / individuals rapid deployment of multiple unique sites creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)

  • Les autorisations surchargées par les plugins

    27 avril 2010, par

    Mediaspip core
    autoriser_auteur_modifier() afin que les visiteurs soient capables de modifier leurs informations sur la page d’auteurs

  • Gestion des droits de création et d’édition des objets

    8 février 2011, par

    Par défaut, beaucoup de fonctionnalités sont limitées aux administrateurs mais restent configurables indépendamment pour modifier leur statut minimal d’utilisation notamment : la rédaction de contenus sur le site modifiables dans la gestion des templates de formulaires ; l’ajout de notes aux articles ; l’ajout de légendes et d’annotations sur les images ;

Sur d’autres sites (3910)

  • EXC_BAD_ACCESS at avformat_find_stream_info in FFMPEG

    27 décembre 2018, par Mubin Mall

    I achieved IP Camera streaming using ffmpeg with the help of this library : https://github.com/kolyvan/kxmovie also I am recording IPCamera streaming coming and same is done using ffmpeg.

    Now I am facing one issue as soon as I add R5ProStreaming.framework in project and run application in real device application is getting crashed at if (avformat_find_stream_info(formatCtx, NULL) < 0) here.
    And when I remove that framework and move it to trash and run again then all working fine.

    - (kxMovieError) openInput: (NSString *) path
    {
       AVFormatContext *formatCtx = NULL;

       if (_interruptCallback) {

           formatCtx = avformat_alloc_context();
           if (!formatCtx)
               return kxMovieErrorOpenFile;

           AVIOInterruptCB cb = {interrupt_callback, (__bridge void *)(self)};
           formatCtx->interrupt_callback = cb;
       }
       AVDictionary *opts = 0;
    //
       av_dict_set(&opts, "rtsp_transport", "tcp", 0);

       if (avformat_open_input(&formatCtx, [path cStringUsingEncoding: NSUTF8StringEncoding], NULL,  &opts) < 0) {
           av_log(NULL, AV_LOG_ERROR, "Couldn't open file\n");
           return kxMovieErrorStreamInfoNotFound;


       }
           if (avformat_open_input(&formatCtx, [path cStringUsingEncoding: NSUTF8StringEncoding], NULL, NULL) < 0) {

           if (formatCtx)
               avformat_free_context(formatCtx);
           return kxMovieErrorOpenFile;
       }

       //-----APP IS GETTING CRASHED HERE AND GIVING EXC_BAD_ACCESS---//
       if (avformat_find_stream_info(formatCtx, NULL) < 0)
       {
           avformat_close_input(&formatCtx);
           return kxMovieErrorStreamInfoNotFound;
       }
    //
       av_dump_format(formatCtx, 0, [path.lastPathComponent cStringUsingEncoding: NSUTF8StringEncoding], false);

       _formatCtx = formatCtx;

       inputFormatCtx = _formatCtx;
       NSString *filePath = [[NSBundle mainBundle] pathForResource:@"newdemo" ofType:@".mov"];

       if (filePath)
       {
           NSLog(@"%s - %d # File found", __PRETTY_FUNCTION__, __LINE__);
       }
       else
       {
           NSLog(@"%s - %d # File NOT found", __PRETTY_FUNCTION__, __LINE__);
       }

       /*
        *  av_find_input_format(const char *short_name)
        *
        *  Find AVInputFormat based on the short name of the input format.
        */
       AVInputFormat *inputFormat = av_find_input_format([@"mpeg" UTF8String]);

       if (inputFormat)
       {
           NSLog(@"%s - %d # inputFormat identifed", __PRETTY_FUNCTION__, __LINE__);
       }
       else
       {
           NSLog(@"%s - %d # inputFormat NOT identifed", __PRETTY_FUNCTION__, __LINE__) ;
       }

       const char *utf8FilePath = [filePath UTF8String];
       NSLog(@"%s - %d # utf8FilePath = %s", __PRETTY_FUNCTION__, __LINE__, utf8FilePath);

       /*
        *  avformat_open_input(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options)
        *
        *  Open an input stream and read the header. The codecs are not opened.
        */
       int openInputValue =0;
       NSLog(@"%s - %d # openInputValue = %d", __PRETTY_FUNCTION__, __LINE__, openInputValue);

       if (openInputValue == 0)
       {
           NSLog(@"%s - %d # Can open the file", __PRETTY_FUNCTION__, __LINE__);
       }
       else
       {
           NSLog(@"%s - %d # Cannot open the file", __PRETTY_FUNCTION__, __LINE__);
           avformat_close_input(&inputFormatCtx);
       }

       /*
        *  Read packets of a media file to get stream information.
        *
        *  avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options)
        */
       //    int streamInfoValue = avformat_find_stream_info(inputFormatCtx, NULL);
       //    NSLog(@"%s - %d # streamInfoValue = %d", __PRETTY_FUNCTION__, __LINE__, streamInfoValue);
       //
       //    if (streamInfoValue < 0)
       //    {
       //        NSLog(@"%s - %d # streamInfoValue Error", __PRETTY_FUNCTION__, __LINE__);
       //        avformat_close_input(&inputFormatCtx);
       //    }

       /*
        *  nb_streams : Number of Audio and Video streams of the input file
        */

       NSUInteger inputStreamCount = inputFormatCtx->nb_streams;
       NSLog(@"%s - %d # inputStreamCount = %lu", __PRETTY_FUNCTION__, __LINE__, (unsigned long)inputStreamCount);

       for(unsigned int i = 0; istreams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
           {
               NSLog(@"%s - %d # Found Video Stream", __PRETTY_FUNCTION__, __LINE__);
               inputVideoStreamIndex = i;
               inputVideoStream = inputFormatCtx->streams[i];
           }

           if(inputFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
           {
               NSLog(@"%s - %d # Found Audio Stream", __PRETTY_FUNCTION__, __LINE__);
               inputAudioStreamIndex = i;
               inputAudioStream = inputFormatCtx->streams[i];
           }
       }

       if(inputVideoStreamIndex == -1 && inputAudioStreamIndex == -1)
       {
           NSLog(@"%s - %d # Have not found any Video or Audio stream", __PRETTY_FUNCTION__, __LINE__);
       }

       /*
        *  Finding duration of the stream
        */
       if(inputFormatCtx->duration == AV_NOPTS_VALUE)
       {
           NSLog(@"%s - %d # Undefined timestamp value", __PRETTY_FUNCTION__, __LINE__);

           if(_videoStream != -1 && inputFormatCtx->streams[_videoStream])
           {
               //            if(inputFormatCtx->streams[_videoStream]->duration != AV_NOPTS_VALUE)
               //            {
               inputEndtimeInt64 = (inputFormatCtx->streams[_videoStream]->duration)/(inputFormatCtx->streams[_videoStream]->time_base.den/inputFormatCtx->streams[_videoStream]->time_base.num);
               //            }
               //            else
               //            {
               //                inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
               //
               //            }
           }
           else if(_audioStream != -1 && inputFormatCtx->streams[_audioStream])
           {
               //            if(inputFormatCtx->streams[_audioStream]->duration != AV_NOPTS_VALUE)
               //            {
               inputEndtimeInt64 = (inputFormatCtx->streams[_audioStream]->duration)/(AV_TIME_BASE);
               //            }
               //            else
               //            {
               //                inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
               //
               //            }
           }
       }
       else
       {
           NSLog(@"%s - %d # Defined timestamp value", __PRETTY_FUNCTION__, __LINE__);

           inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
       }
       NSLog(@"%s - %d # inputEndtimeInt64 = %lld", __PRETTY_FUNCTION__, __LINE__, inputEndtimeInt64);

       /*
        *  Finding out the frame rate
        */
       if(_videoStream != -1 && inputFormatCtx->streams[_videoStream])
       {

           framesPerSec =  (inputFormatCtx->streams[_videoStream]->r_frame_rate.num)/ (inputFormatCtx->streams[_videoStream]->r_frame_rate.den);
       }
       else
       {
           framesPerSec = 24;
       }

       numberOfFrames = framesPerSec * (int) inputEndtimeInt64;
       NSLog(@"%s - %d # numberOfFrames = %d", __PRETTY_FUNCTION__, __LINE__, numberOfFrames);


       /*
        *  Seek to timestamp ts.
        *
        *  avformat_seek_file(AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags)
        */

       if(avformat_seek_file(inputFormatCtx, inputAudioStreamIndex, INT64_MIN, outputStartTimeInt64, INT64_MAX, AVSEEK_FLAG_FRAME) < 0)
       {
           NSLog(@"%s - %d # Seek OK", __PRETTY_FUNCTION__, __LINE__);
       }
       else
       {
           NSLog(@"%s - %d # Seek ERROR", __PRETTY_FUNCTION__, __LINE__);
       }

       /*
        *  Creating output file path1
        */

    //    NSString * timestamp = [NSString stringWithFormat:@"%f",[[NSDate date] timeIntervalSince1970] * 1000];
    //    
    //    NSArray *directoryPathsArray = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    //    NSString *documentsDirectory = [directoryPathsArray objectAtIndex:0];
    //    NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@.mov",documentsDirectory,timestamp]; // Not working if we replace .avi with .mp4

       NSString* filename = [NSString stringWithFormat:@"IPCamera%d.mov", _currentFile];
       NSString* outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
       _url = [NSURL fileURLWithPath:outputFilePath];

       /*
        *  Return the output format in the list of registered output formats
        *  which best matches the provided parameters, or return NULL if
        *  there is no match.
        *
        *  av_guess_format(const char *short_name, const char *filename, const char *mime_type)
        */
       outputFormat = av_guess_format(NULL, [outputFilePath UTF8String], NULL);

       NSLog(@"%s - %d # outputFormat->name = %s", __PRETTY_FUNCTION__, __LINE__, outputFormat->name);

       if(outputFormat == NULL)
       {
           NSLog(@"%s - %d # outputFormat == NULL", __PRETTY_FUNCTION__, __LINE__);
       }
       else
       {
           /*
            *  Allocate an AVFormatContext.
            */
           outputContext = avformat_alloc_context();

           if(outputContext)
           {
               outputContext->oformat = outputFormat;      // The output container format.

               snprintf(outputContext->filename, sizeof(outputContext->filename), "%s", [outputFilePath UTF8String]);
           }
           else
           {
               NSLog(@"%s - %d # outputContext == NULL", __PRETTY_FUNCTION__, __LINE__);
           }
       }

       outputVideoCodec = outputAudioCodec = NULL;

       /*
        *  video_codec = default video codec
        */
       if(outputFormat->video_codec != AV_CODEC_ID_NONE && inputVideoStream != NULL)
       {
           /*
            *  Find a registered encoder with a matching codec ID.
            *
            *  avcodec_find_encoder(enum AVCodecID id)
            */
           outputVideoCodec = avcodec_find_encoder(outputFormat->video_codec);

           if(NULL == outputVideoCodec)
           {
               NSLog(@"%s - %d # Could Not Find Vid Encoder", __PRETTY_FUNCTION__, __LINE__);
           }
           else
           {
               NSLog(@"%s - %d # Found Out Vid Encoder", __PRETTY_FUNCTION__, __LINE__);

               /*
                *  Add a new stream to a media file.
                *
                *  avformat_new_stream(AVFormatContext *s, const AVCodec *c)
                */
               outputVideoStream = avformat_new_stream(outputContext, outputVideoCodec);

               if(NULL == outputVideoStream)
               {
                   NSLog(@"%s - %d # Failed to Allocate Output Vid Strm", __PRETTY_FUNCTION__, __LINE__);
               }
               else
               {
                   NSLog(@"%s - %d # Allocated Video Stream", __PRETTY_FUNCTION__, __LINE__);

                   /*
                    *  Copy the settings of the source AVCodecContext into the destination AVCodecContext.
                    *
                    *  avcodec_copy_context(AVCodecContext *dest, const AVCodecContext *src)
                    */

                   if(avcodec_copy_context(outputVideoStream->codec, inputFormatCtx->streams[inputVideoStreamIndex]->codec) != 0)
                   {
                       NSLog(@"%s - %d # Failed to Copy Context", __PRETTY_FUNCTION__, __LINE__);
                   }
                   else
                   {
                       AVStream *st = _formatCtx->streams[_videoStream];
                       outputVideoStream->sample_aspect_ratio.den = outputVideoStream->codec->sample_aspect_ratio.den;     // denominator
                       outputVideoStream->sample_aspect_ratio.num = st->codec->sample_aspect_ratio.num;    // numerator
                       NSLog(@"%s - %d # Copied Context 1", __PRETTY_FUNCTION__, __LINE__);
                       outputVideoStream->codec->codec_id = st->codec->codec_id;
                       outputVideoStream->codec->time_base.num = st->codec->time_base.num;
                       outputVideoStream->codec->time_base.den = STREAM_FRAME_RATE;
                       outputVideoStream->time_base.num = st->time_base.num;
                       outputVideoStream->time_base.den =  st->time_base.den;
                       outputVideoStream->r_frame_rate.num =st->r_frame_rate.num;
                       outputVideoStream->nb_frames = STREAM_NB_FRAMES;
                       outputVideoStream->r_frame_rate.den = st->r_frame_rate.den;
                       outputVideoStream->avg_frame_rate.den = st->avg_frame_rate.num;
                       outputVideoStream->avg_frame_rate.num = st->avg_frame_rate.num;
    //                  outputVideoStream->duration = st->duration;
                   }
               }
           }
       }

       if(outputFormat->audio_codec != AV_CODEC_ID_NONE && inputAudioStream != NULL)
       {
           outputAudioCodec = avcodec_find_encoder(outputFormat->audio_codec);

           if(NULL == outputAudioCodec)
           {
               NSLog(@"%s - %d # Could Not Find Out Aud Encoder", __PRETTY_FUNCTION__, __LINE__);
           }
           else
           {
               NSLog(@"%s - %d # Found Out Aud Encoder", __PRETTY_FUNCTION__, __LINE__);

               outputAudioStream = avformat_new_stream(outputContext, outputAudioCodec);

               if(NULL == outputAudioStream)
               {
                   NSLog(@"%s - %d # Failed to Allocate Out Vid Strm", __PRETTY_FUNCTION__, __LINE__);
               }
               else
               {
                   if(avcodec_copy_context(outputAudioStream->codec, inputFormatCtx->streams[inputAudioStreamIndex]->codec) != 0)
                   {
                       NSLog(@"%s - %d # Failed to Copy Context", __PRETTY_FUNCTION__, __LINE__);
                   }
                   else
                   {
                       //                    AVStream *st = _formatCtx->streams[_audioStream];

                       NSLog(@"%s - %d # Copied Context 2", __PRETTY_FUNCTION__, __LINE__);
                       outputAudioStream->codec->codec_id = inputAudioStream->codec->codec_id;
                       outputAudioStream->codec->codec_tag = 0;
                       //                    outputAudioStream->pts = inputAudioStream->pts;
    //                    outputAudioStream->duration = inputAudioStream->duration;
                       outputAudioStream->time_base.num = inputAudioStream->time_base.num;
                       outputAudioStream->time_base.den = inputAudioStream->time_base.den;
                   }
               }
           }
       }

       if (!(outputFormat->flags & AVFMT_NOFILE))
       {
           /*
            *  Create and initialize a AVIOContext for accessing the resource indicated by url.
            *
            *  avio_open2(AVIOContext **s, const char *url, int flags, const AVIOInterruptCB *int_cb, AVDictionary **options)
            */
           if (avio_open2(&outputContext->pb, [outputFilePath UTF8String], AVIO_FLAG_WRITE, NULL, NULL) < 0)
           {
               NSLog(@"%s - %d # Could Not Open File", __PRETTY_FUNCTION__, __LINE__);
           }
       }

       /* Write the stream header, if any. */
       /*
        *  Allocate the stream private data and write the stream header to an output media file.
        *
        *  avformat_write_header(AVFormatContext *s, AVDictionary **options);
        */
       if (avformat_write_header(outputContext, NULL) < 0)
       {
           NSLog(@"%s - %d # Error Occurred While Writing Header", __PRETTY_FUNCTION__, __LINE__);
       }
       else
       {
           NSLog(@"%s - %d # Written Output header", __PRETTY_FUNCTION__, __LINE__);

           initDone = true;
       }

       return kxMovieErrorNone;
    }

    More over I contacted Red5Pro team and asked them regarding that along with video demo. They replied me like this

    What’s most likely happening is that the version of FFMPEG that’s being loaded by that project is incompatible with the customized version of it that’s embedded in our SDK, and some duplicate definition error is causing the wrong version to be loaded. It could also be any number of conflicts between one of the libraries in that project with the SDK, or one of the support libraries that the sdk requires ( I have to assume that since it compiled, that you did add the libraries listed in step four here : https://www.red5pro.com/docs/streaming/ios.html#project-setup ) and if that’s the case, I don’t know of a good way to correct the issue as chasing down individual third-party libraries that raise incompatibilities with our SDKs to correct them is beyond the reach of our team. (edited)

    Can anybody have idea where to look ?

    Thanks

  • How to split a mp4 file into multiple .m4s chunks with one init.mp4 file

    29 novembre 2018, par JAVA Coder

    I am trying to split one audio.mp4 into multiple chunks(each of 10 seconds) with file format of .m4s or .mpd and one init.mp4 .

    1>I have successfully made chunks of 10 secs with one m3u8 file by this command-

    ffmpeg -i song.mp3 -c:a aac -b:a 64k -vn -hls_time 10 -hls_list_size 0 abc.m3u8

    2>Tried this command also

    MP4Box -dash 10000 -frag 1000 -rap -segment-name myDash -subsegs-per-sidx 5 -url-template test.mp4

    But not able to make .m4s chunks with one init.mp4.

  • OpenCV's VideoCapture::open Video Source Dialog

    13 novembre 2015, par swtdrgn

    In my current project, when I call VideoCapture::open(camera device index) and the camera is in used by another program, it shows a Video Source dialog and returns true when I select a device that is already in use.

    However, in my [previous] experiment project, when I called VideoCapture::open(camera device index), it doesn’t show this dialog.

    I want to know what is causing the Video Source dialog to show and the program to behave differently from the experimental project.

    This is the source code to the experiment project :

    int main (int argc, char *argv[])
    {

       //vars
       time_duration td, td1;
       ptime nextFrameTimestamp, currentFrameTimestamp, initialLoopTimestamp, finalLoopTimestamp;
       int delayFound = 0;
       int totalDelay= 0;

       // initialize capture on default source
       VideoCapture capture;
       std::cout << "capture.open(0): " << capture.open(0) << std::endl;
       std::cout << "NOOO" << std::endl;
       namedWindow("video", 1);

       // set framerate to record and capture at
       int framerate = 15;

       // Get the properties from the camera
       double width = capture.get(CV_CAP_PROP_FRAME_WIDTH);
       double height = capture.get(CV_CAP_PROP_FRAME_HEIGHT);

       // print camera frame size
       //cout << "Camera properties\n";
       //cout << "width = " << width << endl <<"height = "<< height << endl;

       // Create a matrix to keep the retrieved frame
       Mat frame;

       // Create the video writer
       VideoWriter video("capture.avi",0, framerate, cvSize((int)width,(int)height) );

       // initialize initial timestamps
       nextFrameTimestamp = microsec_clock::local_time();
       currentFrameTimestamp = nextFrameTimestamp;
       td = (currentFrameTimestamp - nextFrameTimestamp);

       // start thread to begin capture and populate Mat frame
       boost::thread captureThread(captureFunc, &frame, &capture);
       // loop infinitely
       for(bool q=true;q;)
       {
           if(frame.empty()){continue;}
           //if(cvWaitKey( 5 ) == 'q'){ q=false; }
           // wait for X microseconds until 1second/framerate time has passed after previous frame write
           while(td.total_microseconds() < 1000000/framerate){
               //determine current elapsed time
               currentFrameTimestamp = microsec_clock::local_time();
               td = (currentFrameTimestamp - nextFrameTimestamp);
               if(cvWaitKey( 5 ) == 'q'){
                   std::cout << "B" << std::endl;
                   q=false;
                   boost::posix_time::time_duration timeout = boost::posix_time::milliseconds(0);
                   captureThread.timed_join(timeout);
                   break;
               }
           }

           // determine time at start of write
           initialLoopTimestamp = microsec_clock::local_time();

           // Save frame to video
           video << frame;
           imshow("video", frame);

           //write previous and current frame timestamp to console
           cout << nextFrameTimestamp << " " << currentFrameTimestamp << " ";

           // add 1second/framerate time for next loop pause
           nextFrameTimestamp = nextFrameTimestamp + microsec(1000000/framerate);

           // reset time_duration so while loop engages
           td = (currentFrameTimestamp - nextFrameTimestamp);

           //determine and print out delay in ms, should be less than 1000/FPS
           //occasionally, if delay is larger than said value, correction will occur
           //if delay is consistently larger than said value, then CPU is not powerful
           // enough to capture/decompress/record/compress that fast.
           finalLoopTimestamp = microsec_clock::local_time();
           td1 = (finalLoopTimestamp - initialLoopTimestamp);
           delayFound = td1.total_milliseconds();
           cout << delayFound << endl;

           //output will be in following format
           //[TIMESTAMP OF PREVIOUS FRAME] [TIMESTAMP OF NEW FRAME] [TIME DELAY OF WRITING]
           if(!q || cvWaitKey( 5 ) == 'q'){
               std::cout << "C" << std::endl;
               q=false;
               boost::posix_time::time_duration timeout = boost::posix_time::milliseconds(0);
               captureThread.timed_join(timeout);
               break;
           }
       }

       // Exit
       return 0;
    }

    Video Source Dialog