
Other articles (44)
-
Supporting all media types
13 April 2011
Unlike most software and media-sharing platforms, MediaSPIP aims to manage as many different media types as possible. The following are just a few examples from an ever-expanding list of supported formats: images: png, gif, jpg, bmp and more; audio: MP3, Ogg, Wav and more; video: AVI, MP4, OGV, mpg, mov, wmv and more; text, code and other data: OpenOffice, Microsoft Office (Word, PowerPoint, Excel), web (html, CSS), LaTeX, Google Earth and (...)
-
MediaSPIP v0.2
21 June 2013
MediaSPIP 0.2 is the first stable release of MediaSPIP.
Its official release date is 21 June 2013, as announced here.
The zip file provided here contains only the MediaSPIP sources, in standalone form.
As with the previous release, all of the software dependencies must be installed manually on the server.
If you want to use this archive for a farm-mode installation, further modifications will also be required (...)
-
MediaSPIP version 0.1 Beta
16 April 2011
MediaSPIP 0.1 beta is the first version of MediaSPIP declared "usable".
The zip file provided here contains only the MediaSPIP sources, in standalone form.
For a working installation, all of the software dependencies must be installed manually on the server.
If you want to use this archive for a farm-mode installation, further modifications will also be required (...)
On other sites (6649)
-
Revert "avcodec/decode : use a packet list to store packet properties"
4 December 2022, by James Almer
Revert "avcodec/decode: use a packet list to store packet properties"
The idea behind last_pkt_props was to store the properties of the last packet
fed to the decoder. Any sort of queueing required by CODEC_CAP_DELAY decoders
that consume several packets before they start outputting frames should be done
by the decoders in question. An example of this is libdav1d.

This is required for the following commits that will fix last_pkt_props in
frame threading scenarios, as well as maintain its contents during flush.

This reverts commit 022a12b306ab2096e6ac9fc9b149828a849d65b2.
Signed-off-by: James Almer <jamrial@gmail.com>
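For readers less familiar with the decode API the commit refers to: a decoder with delay (libdav1d is the example given) may accept several packets, with avcodec_receive_frame() returning AVERROR(EAGAIN), before the first frame comes out, and the caller never sees or manages that internal queue. Below is a minimal, hedged sketch of the caller-side loop under that behaviour; it assumes an already-opened AVFormatContext *fmt and AVCodecContext *dec, and is illustrative rather than code taken from FFmpeg.

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>

/* Feed packets to a possibly-delaying decoder and drain whatever frames
 * it is ready to return. Any packet queueing the decoder needs is its
 * own business; the caller only reacts to AVERROR(EAGAIN). */
static int decode_loop(AVFormatContext *fmt, AVCodecContext *dec, int video_index)
{
    AVPacket *pkt   = av_packet_alloc();
    AVFrame  *frame = av_frame_alloc();
    int ret = 0;

    if (!pkt || !frame)
        ret = AVERROR(ENOMEM);

    while (ret >= 0 && av_read_frame(fmt, pkt) >= 0) {
        if (pkt->stream_index == video_index) {
            ret = avcodec_send_packet(dec, pkt);   /* the decoder may just queue it */
            if (ret >= 0) {
                /* Drain everything the decoder is ready to give back.
                 * EAGAIN here simply means "send more input first". */
                while ((ret = avcodec_receive_frame(dec, frame)) >= 0)
                    av_frame_unref(frame);         /* a real player would display it */
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                    ret = 0;
            }
        }
        av_packet_unref(pkt);
    }

    /* Flushing at end of stream (avcodec_send_packet(dec, NULL)) omitted for brevity. */
    av_frame_free(&frame);
    av_packet_free(&pkt);
    return ret;
}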
-
rtmp streaming video does not work [on hold]
12 May 2017, by 김동영
I want to build an RTMP live streaming player using ffmpeg and SDL. As a first step, I just want to display a simple image. Packets come in, but no image is ever shown. Please let me know why the source below does not work.
I have been looking for the answer for a week without success, so I am asking here.
The app is being developed in Objective-C for iOS.
I hope you can help me.
Have a good day, and thank you.
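One way to narrow a "packets arrive but nothing is drawn" problem down is to test the SDL side on its own, without FFmpeg in the picture. The sketch below is not from the original post (the 640x360 size and the flat-grey frame are made up); it pushes a single hand-built YUV420 frame into an SDL_PIXELFORMAT_IYUV texture. If this shows nothing either, the problem is in the SDL setup rather than in the decoding loop. The poster's full source follows after the sketch.

#include <SDL2/SDL.h>
#include <string.h>

/* Display one solid-grey 640x360 YUV420 frame for two seconds. */
int main(int argc, char *argv[])
{
    (void)argc; (void)argv;
    const int w = 640, h = 360;

    if (SDL_Init(SDL_INIT_VIDEO) != 0) {
        SDL_Log("SDL_Init failed: %s", SDL_GetError());
        return 1;
    }

    SDL_Window   *win = SDL_CreateWindow("IYUV test", SDL_WINDOWPOS_UNDEFINED,
                                         SDL_WINDOWPOS_UNDEFINED, w, h, 0);
    SDL_Renderer *ren = SDL_CreateRenderer(win, -1, 0);
    SDL_Texture  *tex = SDL_CreateTexture(ren, SDL_PIXELFORMAT_IYUV,
                                          SDL_TEXTUREACCESS_STREAMING, w, h);
    if (!win || !ren || !tex) {
        SDL_Log("SDL setup failed: %s", SDL_GetError());
        return 1;
    }

    /* YUV420p: full-size Y plane plus quarter-size U and V planes.
     * 128 in every plane gives a neutral mid grey. */
    static Uint8 y[640 * 360], u[640 * 360 / 4], v[640 * 360 / 4];
    memset(y, 128, sizeof(y));
    memset(u, 128, sizeof(u));
    memset(v, 128, sizeof(v));

    SDL_UpdateYUVTexture(tex, NULL, y, w, u, w / 2, v, w / 2);
    SDL_RenderClear(ren);
    SDL_RenderCopy(ren, tex, NULL, NULL);
    SDL_RenderPresent(ren);
    SDL_Delay(2000);

    SDL_DestroyTexture(tex);
    SDL_DestroyRenderer(ren);
    SDL_DestroyWindow(win);
    SDL_Quit();
    return 0;
}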
#import <libavcodec/avcodec.h>
#import <libavformat/avformat.h>
#import <libswscale/swscale.h>
#import <libavfilter/avfilter.h>
#import <libavfilter/avfiltergraph.h>
#import <libavfilter/buffersrc.h>
#import <libswresample/swresample.h>
#import
#import
@implementation hello2
- (void)viewDidLoad {
[super viewDidLoad];
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame,*pFrameYUV;
uint8_t *out_buffer;
AVPacket *packet;
int y_size;
int ret, got_picture;
struct SwsContext *img_convert_ctx;
//SDL---------------------------
int screen_w=0,screen_h=0;
SDL_Window *screen;
SDL_Renderer* sdlRenderer;
SDL_Texture* sdlTexture;
SDL_Rect sdlRect;
FILE *fp_yuv;
av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
SDL_SetMainReady();
//rtmp://www.planeta-online.tv:1936/live/channel_4
if(avformat_open_input(&pFormatCtx,"rtmp://live.hkstv.hk.lxdns.com/live/hks",NULL,NULL)!=0){
printf("Couldn't open input stream.\n");
return;
}
if(avformat_find_stream_info(pFormatCtx,NULL)<0){
printf("Couldn't find stream information.\n");
return;
}
videoindex=-1;
for(i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
videoindex=i;
break;
}
if(videoindex==-1){
printf("Didn't find a video stream.\n");
return;
}
pCodecCtx=pFormatCtx->streams[videoindex]->codec;
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL){
printf("Codec not found.\n");
return;
}
if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
printf("Could not open codec.\n");
return;
}
pFrame=av_frame_alloc();
pFrameYUV=av_frame_alloc();
out_buffer=(uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
packet=(AVPacket *)av_malloc(sizeof(AVPacket));
//Output Info-----------------------------
printf("--------------- File Information ----------------\n");
av_dump_format(pFormatCtx,0,"rtmp://live.hkstv.hk.lxdns.com/live/hks",0);
printf("-------------------------------------------------\n");
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
#if OUTPUT_YUV420P
fp_yuv=fopen("output.yuv","wb+");
#endif
if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
printf( "Could not initialize SDL - %s\n", SDL_GetError());
return;
}
screen_w = pCodecCtx->width;
screen_h = pCodecCtx->height;
//SDL 2.0 Support for multiple windows
screen = SDL_CreateWindow("Simplest ffmpeg player", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
screen_w, screen_h,
SDL_WINDOW_OPENGL);
if(!screen) {
printf("SDL: could not create window - exiting:%s\n",SDL_GetError());
return;
}
sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,pCodecCtx->width,pCodecCtx->height);
sdlRect.x=0;
sdlRect.y=0;
sdlRect.w=screen_w;
sdlRect.h=screen_h;
SDL_SetTextureBlendMode(sdlTexture, SDL_BLENDMODE_BLEND);
//SDL End----------------------
while(av_read_frame(pFormatCtx, packet)>=0){
if(packet->stream_index==videoindex){
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if(ret < 0){
printf("Decode Error.\n");
return;
}
//NSLog(@"write packet pst = %lld, dts = %lld, stream = %d", packet->pts, packet->dts, packet->stream_index);
if(got_picture){
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameYUV->data, pFrameYUV->linesize);
#if OUTPUT_YUV420P
y_size=pCodecCtx->width*pCodecCtx->height;
fwrite(pFrameYUV->data[0],1,y_size,fp_yuv); //Y
fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv); //U
fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv); //V
#endif
//SDL---------------------------
#if 0
SDL_UpdateTexture( sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0] );
#else
SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
pFrameYUV->data[0], pFrameYUV->linesize[0],
pFrameYUV->data[1], pFrameYUV->linesize[1],
pFrameYUV->data[2], pFrameYUV->linesize[2]);
#endif
SDL_RenderClear( sdlRenderer );
SDL_RenderCopy( sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent( sdlRenderer );
//SDL End-----------------------
//Delay 20ms
SDL_Delay(20);
}
}
av_free_packet(packet);
}
//flush decoder
//FIX: Flush Frames remained in Codec
while (1) {
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0)
break;
if (!got_picture)
break;
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameYUV->data, pFrameYUV->linesize);
#if OUTPUT_YUV420P
int y_size=pCodecCtx->width*pCodecCtx->height;
fwrite(pFrameYUV->data[0],1,y_size,fp_yuv); //Y
fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv); //U
fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv); //V
#endif
//SDL---------------------------
SDL_UpdateTexture( sdlTexture, &sdlRect, pFrameYUV->data[0], pFrameYUV->linesize[0] );
SDL_RenderClear( sdlRenderer );
SDL_RenderCopy( sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent( sdlRenderer );
//SDL End-----------------------
//Delay 40ms
SDL_Delay(40);
}
sws_freeContext(img_convert_ctx);
#if OUTPUT_YUV420P
fclose(fp_yuv);
#endif
SDL_Quit();
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
}
@end
-
Why do I get a crash only sometimes when closing input file with ffmpeg
1 May 2013, by Bradley
I have a problem where, only sometimes, calling avformat_close_input(&pFormatCtx) results in "malloc check failed" and my application crashes.
I really need to use ffmpeg, because I need to grab a thumbnail of a video to show in a list and I cannot find an alternative library.
Can anybody see anything in my code where I am misusing the library in a way that could cause this malloc check failure?
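Not an answer to the crash as such, but for comparison, here is a minimal open / decode-one-frame / close lifecycle written against current FFmpeg APIs (the function name grab_first_frame and all variable names are invented for this sketch; it does not claim to pinpoint the bug). The point it illustrates is ownership: the decoder context is allocated by the caller rather than borrowed from the stream, and every resource is released exactly once, with its matching free/close call, before avformat_close_input(). The code from the question follows after the sketch.

#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>

/* Open a file, decode its first video frame, then tear everything down.
 * Every allocation is released exactly once, in reverse order, with the
 * matching FFmpeg free/close call. */
static int grab_first_frame(const char *url)
{
    AVFormatContext *fmt = NULL;
    AVCodecContext  *dec = NULL;
    AVPacket *pkt   = NULL;
    AVFrame  *frame = NULL;
    int stream, ret;

    if ((ret = avformat_open_input(&fmt, url, NULL, NULL)) < 0)
        return ret;
    if ((ret = avformat_find_stream_info(fmt, NULL)) < 0)
        goto end;

    stream = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (stream < 0) { ret = stream; goto end; }

    /* Decode into a context we own, instead of one that belongs to the
     * demuxer; that keeps cleanup responsibilities unambiguous. */
    const AVCodec *codec =
        avcodec_find_decoder(fmt->streams[stream]->codecpar->codec_id);
    dec = avcodec_alloc_context3(codec);
    if (!dec) { ret = AVERROR(ENOMEM); goto end; }
    avcodec_parameters_to_context(dec, fmt->streams[stream]->codecpar);
    if ((ret = avcodec_open2(dec, codec, NULL)) < 0)
        goto end;

    pkt   = av_packet_alloc();
    frame = av_frame_alloc();
    if (!pkt || !frame) { ret = AVERROR(ENOMEM); goto end; }

    while ((ret = av_read_frame(fmt, pkt)) >= 0) {
        if (pkt->stream_index == stream &&
            avcodec_send_packet(dec, pkt) >= 0 &&
            avcodec_receive_frame(dec, frame) >= 0) {
            /* frame now holds the first decoded picture; scale or encode
             * it to a thumbnail here, before the cleanup below runs. */
            av_packet_unref(pkt);
            ret = 0;
            break;
        }
        av_packet_unref(pkt);
    }

end:
    av_frame_free(&frame);            /* all of these accept NULL safely */
    av_packet_free(&pkt);
    avcodec_free_context(&dec);
    avformat_close_input(&fmt);
    return ret;
}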
bool MuteCamera::PullFrame( )
{
pMJPEGCodec = avcodec_find_encoder(CODEC_ID_MJPEG );
bool bRet = false;
int videoStream = -1;
AVFrame *pFrame=NULL;
AVFrame *pFrameRGB=NULL;
AVPacket packet;
int frameFinished=0;
//AVDictionary *optionsDict = NULL;
AVInputFormat *pFormat = NULL;
const char formatName[] = "mp4";
if (!(pFormat = av_find_input_format(formatName))) {
printf("can't find input format %s\n", formatName);
return -1;
}
AVFormatContext *pFormatCtx = NULL;
pFormatCtx=avformat_alloc_context();
if(pFormatCtx == NULL)
{
printf("\n NULL CONTEXT \n ");
return -1;
}
if(avformat_open_input (&pFormatCtx, capturedUrl.data(), pFormat, NULL) == 0 )
{
for(int i=0; i<(int)pFormatCtx->nb_streams; i++)
{
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
videoStream=i;
break;
}
}
if(videoStream >= 0 )
{
AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;
AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec != NULL)
{
if( avcodec_open2(pCodecCtx, pCodec, NULL) >= 0 )
{
pFrame=avcodec_alloc_frame();
if(pFrame != NULL)
{
frameFinished = 0;
while(av_read_frame(pFormatCtx, &packet)>=0)
{
if(packet.stream_index==videoStream)
{
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if(frameFinished)
{
printf("\n FRAMEFINISHED \n ");
QString *uu = new QString(capturedUrl.data());//
uu->replace(".mp4", "thumbnail.jpg");
WriteJPEG(pCodecCtx, pFrame, uu->toLatin1().data(), PIX_FMT_YUVJ420P);
if(viewingVideos && viewingFromDifferent)
{
QVariantMap map = QVariantMap();
map["title"] = actualFilename;
map["path"] = actualFilename.replace(".mp4", "thumbnail.jpg");// QString("asset:///white_photo.png");
m_listDataModel << map;
}
delete uu;
av_free_packet(&packet);
break;
}
else
{
printf("\n FRAMENOTFINISHED \n ");
}
}
av_free_packet(&packet);
}
av_free(pFrameRGB);
av_free(pFrame);
avcodec_close(pCodecCtx);
//av_free(pCodecCtx);
cout << "\n before free formatctx \n";
cout.flush();
if(pFormatCtx)
avformat_close_input(&pFormatCtx);
cout << "\n after free formatctx \n";
cout.flush();
}
else
bRet = false;
}
else
bRet = false;
}
else
bRet = false;
}
else
bRet = false;
}
return bRet;
}
bool WriteJPEG (AVCodecContext *pCodecCtx, AVFrame *pFrame, char cFileName[], PixelFormat pix)
{
int complete = 0;
bool bRet = false;
int out_buf_size;
uint8_t *out_buf;
AVCodecContext *pMJPEGCtx = avcodec_alloc_context3(pMJPEGCodec);
if( pMJPEGCtx )
{
pMJPEGCtx->bit_rate = pCodecCtx->bit_rate;
pMJPEGCtx->width = pCodecCtx->width;
pMJPEGCtx->height = pCodecCtx->height;
pMJPEGCtx->pix_fmt = pix;
pMJPEGCtx->codec_id = CODEC_ID_MJPEG;
pMJPEGCtx->codec_type = AVMEDIA_TYPE_VIDEO;
pMJPEGCtx->time_base.num = pCodecCtx->time_base.num;
pMJPEGCtx->time_base.den = pCodecCtx->time_base.den;
pMJPEGCtx->time_base= (AVRational){1,29.7};
if( pMJPEGCodec && (avcodec_open2( pMJPEGCtx, pMJPEGCodec, NULL) >= 0) )
{
AVFrame *oframe;
oframe = avcodec_alloc_frame();
if(oframe == NULL)
{
printf("\n (oframe == NULL");
fflush(stdout);
}
/* calculate the bytes needed for the output image and create buffer for the output image */
out_buf_size = avpicture_get_size(pMJPEGCtx->pix_fmt,
pMJPEGCtx->width,
pMJPEGCtx->height);
out_buf = (uint8_t *)av_malloc(out_buf_size * sizeof(uint8_t));
if (out_buf == NULL) {
fprintf(stderr, "cannot allocate output data buffer!\n");
//ret = -ENOMEM;
}
avpicture_alloc((AVPicture *)oframe, pMJPEGCtx->pix_fmt, pMJPEGCtx->width, pMJPEGCtx->height);
struct SwsContext *sws;
sws = sws_getContext(pMJPEGCtx->width, pMJPEGCtx->height, pCodecCtx->pix_fmt,
pMJPEGCtx->width, pMJPEGCtx->height, pMJPEGCtx->pix_fmt, SWS_BILINEAR,
NULL, NULL, NULL);
sws_scale(sws, (const uint8_t **)pFrame->data, pFrame->linesize,
0, pMJPEGCtx->height, &oframe->data[0], &oframe->linesize[0]);
sws_freeContext(sws);
AVPacket pp2;
av_init_packet(&pp2);
pp2.data = NULL;
pp2.size = 0;
avcodec_encode_video2(pMJPEGCtx, &pp2, oframe, &complete);
if(complete)
{
printf("\n packet recieved");
fflush(stdout);
}
else
{
printf("\n packet NOT recieved");
fflush(stdout);
}
if( SaveFrameJpeg(pp2.size, pp2.data, cFileName ) )
bRet = true;
av_free(oframe);
avcodec_close(pMJPEGCtx);
av_free_packet(&pp2);
av_free(out_buf);
av_free(pMJPEGCtx);
}
else
{
printf("\n problem!!");
fflush(stdout);
}
return bRet;
}
}
bool SaveFrameJpeg(int nszBuffer, uint8_t *buffer, char cOutFileName[])
{
bool bRet = false;
FILE *pFile;
if( nszBuffer > 0 )
{
if(0 == 0 )
{
printf("\n start SaveFrameJpeg=%d",nszBuffer );
fflush(stdout);
pFile= fopen(cOutFileName, "wb");
fwrite(buffer, sizeof(uint8_t), nszBuffer, pFile);
bRet = true;
fclose(pFile);
printf("\n end SaveFrameJpeg=%d",nszBuffer );
fflush(stdout);
}
}
return bRet;
}
bool newPullFrame(const std::string& capturedUrl)
{
AVCodec* pMJPEGCodec = avcodec_find_encoder(CODEC_ID_MJPEG );
int videoStream = -1;
AVDictionary *optionsDict = NULL;
AVInputFormat *pFormat = NULL;
const char formatName[] = "mp4";
if (!(pFormat = av_find_input_format(formatName)))
{
std::cout << "can't find input format " << formatName << "\n";
return false;
}
AVFormatContextHandle FormatCtx(avformat_alloc_context());
if(!FormatCtx.is_valid())
{
std::cout << "\n NULL CONTEXT \n ";
return false;
}
if(avformat_open_input (&FormatCtx, capturedUrl.c_str(), pFormat, NULL))
return false;
for(int i=0; i<(int)FormatCtx->nb_streams; i++)
{
if(FormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
videoStream=i;
break;
}
}
if(videoStream < 0 )
return false;
CodecContextHandle CodecCtx(FormatCtx->streams[videoStream]->codec, avcodec_close);
AVCodec *pCodec = avcodec_find_decoder(CodecCtx->codec_id);
if(pCodec == NULL)
return false;
if( avcodec_open2(CodecCtx, pCodec, &optionsDict) < 0 )
return false;
FrameHandle Frame(avcodec_alloc_frame(), av_free);
if(!Frame.is_valid())
return false;
int frameFinished=0;
AVPacket packet;
while(av_read_frame(FormatCtx, &packet)>=0)
{
if(packet.stream_index==videoStream)
{
avcodec_decode_video2(CodecCtx, Frame, &frameFinished, &packet);
if(frameFinished)
{
std::string uu (capturedUrl);
size_t pos = capturedUrl.rfind(".mp4");
uu.replace(pos, 4, "thumbnail.jpg");
// save the frame to file
int Bytes = avpicture_get_size(PIX_FMT_YUVJ420P, CodecCtx->width, CodecCtx->height);
BufferHandle buffer((uint8_t*)av_malloc(Bytes*sizeof(uint8_t)), av_free);
CodecContextHandle OutContext(avcodec_alloc_context3(NULL), free_context);
OutContext->bit_rate = CodecCtx->bit_rate;
OutContext->width = CodecCtx->width;
OutContext->height = CodecCtx->height;
OutContext->pix_fmt = PIX_FMT_YUVJ420P;
OutContext->codec_id = CODEC_ID_MJPEG;
OutContext->codec_type = AVMEDIA_TYPE_VIDEO;
OutContext->time_base.num = CodecCtx->time_base.num;
OutContext->time_base.den = CodecCtx->time_base.den;
OutContext->time_base= (AVRational){1,29.7};
AVCodec *OutCodec = avcodec_find_encoder(OutContext->codec_id);
avcodec_open2(OutContext, OutCodec, NULL);
OutContext->mb_lmin = OutContext->lmin = OutContext->qmin * 118;
OutContext->mb_lmax = OutContext->lmax = OutContext->qmax * 118;
OutContext->flags = 2;
OutContext->global_quality = OutContext->qmin * 118;
Frame->pts = 1;
Frame->quality = OutContext->global_quality;
int ActualSize = avcodec_encode_video(OutContext, buffer, Bytes, Frame);
std::ofstream file(uu.data(), std::ios_base::binary | std::ios_base::out);
file.write((const char*)(uint8_t*)buffer, ActualSize);
file.close();
av_free_packet(&packet);
av_free(Frame);
break;
}
else
{
std::cout << " new pullframe frameNOTfinished\n";
cout.flush();
}
//if(CodecCtx->refcounted_frames == 1)
av_free(Frame);
}
av_free_packet(&packet);
}
return true;
}