
Other articles (37)
-
Adding notes and captions to images
7 February 2011
To be able to add notes and captions to images, the first step is to install the "Légendes" plugin.
Once the plugin is activated, you can configure it in the configuration area to change the rights for creating, modifying and deleting notes. By default, only the site's administrators can add notes to images.
Changes when adding a media item
When adding a media item of type "image", a new button appears above the preview (...)
-
Sites built with MediaSPIP
2 May 2011
This page presents some of the sites running MediaSPIP.
You can of course add your own using the form at the bottom of the page.
-
General document management
13 May 2011
MediaSPIP never modifies the original document that is put online.
For each document put online it performs two successive operations: creating an additional version that can easily be viewed online, while keeping the original available for download in case the original document cannot be read in a web browser; and retrieving the original document's metadata to describe the file textually.
The tables below explain what MediaSPIP can do (...)
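The second of those operations, reading the original file's metadata, can be sketched briefly. MediaSPIP itself is a SPIP (PHP) application, so the snippet below is not its implementation; it is only a hypothetical illustration of the metadata-reading step using FFmpeg's libavformat, the same library that appears in the question further down this page.
// Hypothetical sketch only: dump a file's container-level metadata with
// libavformat. Older FFmpeg builds would also call av_register_all() first.
#include <stdio.h>
#include <libavformat/avformat.h>

int main(int argc, char *argv[])
{
    if (argc < 2) {
        fprintf(stderr, "usage: %s <media file>\n", argv[0]);
        return 1;
    }

    AVFormatContext *fmt = NULL;
    // Open the file and probe its streams (the same two calls used below)
    if (avformat_open_input(&fmt, argv[1], NULL, NULL) != 0 ||
        avformat_find_stream_info(fmt, NULL) < 0) {
        fprintf(stderr, "could not read %s\n", argv[1]);
        return 1;
    }

    // Walk the metadata dictionary (title, artist, creation date, ...)
    const AVDictionaryEntry *tag = NULL;
    while ((tag = av_dict_get(fmt->metadata, "", tag, AV_DICT_IGNORE_SUFFIX)))
        printf("%s=%s\n", tag->key, tag->value);

    avformat_close_input(&fmt);
    return 0;
}
Run against an uploaded file, this prints key=value pairs such as a title or creation date, which is the kind of textual description the excerpt refers to.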
On other sites (4276)
-
j2kdec: Check for integer overflow in tile array allocation
24 December 2011, by Michael Niedermayer
-
Log tile size
10 November 2011, by Mashiat Sarker Shakkhar
-
Black screen when playing a video with ffmpeg and SDL on iOS
1 April 2012, by patrick
I'm attempting to create a video player on iOS using ffmpeg and SDL. I'm decoding the video stream and attempting to convert the pixel data into an SDL_Surface, then convert that over to an SDL_Texture and render it on screen. However, all I'm getting is a black screen. I know the video file is good and can be viewed fine from VLC. Any idea what I'm missing here?
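For reference, the surface-to-texture path I'm aiming for, taken on its own, looks roughly like the sketch below (plain SDL2 API; pixels, width and height stand in for an already-converted RGB565 frame, and renderer is the one created in the initialization code):
// Minimal sketch (assumptions: SDL2, a valid SDL_Renderer *renderer, and a
// pixels buffer already converted to 16-bit RGB565 with pitch = width * 2)
SDL_Surface *surface = SDL_CreateRGBSurfaceFrom(pixels, width, height,
                                                16, width * 2, 0, 0, 0, 0);
SDL_Texture *texture = SDL_CreateTextureFromSurface(renderer, surface);
SDL_RenderClear(renderer);                     // drop the previous frame
SDL_RenderCopy(renderer, texture, NULL, NULL); // copy the texture into the back buffer
SDL_RenderPresent(renderer);                   // show it on screen
SDL_DestroyTexture(texture);
SDL_FreeSurface(surface);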
Initialization code:
// initialize SDL (Simple DirectMedia Layer) to playback the content
if( SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER) )
{
DDLogError(@"Unable to initialize SDL");
return NO;
}
// create window and renderer
window = SDL_CreateWindow(NULL, 0, 0, SCREEN_WIDTH, SCREEN_HEIGHT,
SDL_WINDOW_OPENGL | SDL_WINDOW_BORDERLESS |
SDL_WINDOW_SHOWN);
if ( window == 0 )
{
DDLogError(@"Unable to initialize SDL Window");
}
renderer = SDL_CreateRenderer(window, -1, 0);
if ( !renderer )
{
DDLogError(@"Unable to initialize SDL Renderer");
}
// Initialize FFmpeg and register codecs and their respective file formats
av_register_all();
Playback code:
AVFormatContext *formatContext = NULL;
DDLogInfo(@"Opening media file at location:%@", filePath);
const char *filename = [filePath cStringUsingEncoding:NSUTF8StringEncoding];
// Open media file
if( avformat_open_input(&formatContext, filename, NULL, NULL) != 0 )
{
DDLogWarn(@"Unable to open media file. [File:%@]", filePath);
NSString *failureReason = NSLocalizedString(@"Unable to open file.", @"Media playback failed, unable to open file.");
if ( error != NULL )
{
*error = [NSError errorWithDomain:MediaPlayerErrorDomain
code:UNABLE_TO_OPEN
userInfo:[NSDictionary dictionaryWithObject:failureReason
forKey:NSLocalizedFailureReasonErrorKey]];
}
return NO; // Couldn't open file
}
// Retrieve stream information
if( avformat_find_stream_info(formatContext, NULL) <= 0 )
{
DDLogWarn(@"Unable to locate stream information for file. [File:%@]", filePath);
NSString *failureReason = NSLocalizedString(@"Unable to find audio/video stream information.", @"Media playback failed, unable to find stream information.");
if ( error != NULL )
{
*error = [NSError errorWithDomain:MediaPlayerErrorDomain
code:UNABLE_TO_FIND_STREAM
userInfo:[NSDictionary dictionaryWithObject:failureReason
forKey:NSLocalizedFailureReasonErrorKey]];
}
return NO; // Missing stream information
}
// Find the first video or audio stream
int videoStream = -1;
int audioStream = -1;
DDLogInfo(@"Locating stream information for media file");
for( int index=0; index<(formatContext->nb_streams); index++)
{
if( formatContext->streams[index]->codec->codec_type==AVMEDIA_TYPE_VIDEO )
{
DDLogInfo(@"Found video stream");
videoStream = index;
break;
}
else if( mediaType == AUDIO_FILE &&
(formatContext->streams[index]->codec->codec_type==AVMEDIA_TYPE_AUDIO) )
{
DDLogInfo(@"Found audio stream");
audioStream = index;
break;
}
}
if( videoStream == -1 && (audioStream == -1) )
{
DDLogWarn(@"Unable to find video or audio stream for file");
NSString *failureReason = NSLocalizedString(@"Unable to locate audio/video stream.", @"Media playback failed, unable to locate media stream.");
if ( error != NULL )
{
*error = [NSError errorWithDomain:MediaPlayerErrorDomain
code:UNABLE_TO_FIND_STREAM
userInfo:[NSDictionary dictionaryWithObject:failureReason
forKey:NSLocalizedFailureReasonErrorKey]];
}
return NO; // Didn't find a video or audio stream
}
// Get a pointer to the codec context for the video/audio stream
AVCodecContext *codecContext;
DDLogInfo(@"Attempting to locate the codec for the media file");
if ( videoStream > -1 )
{
codecContext = formatContext->streams[videoStream]->codec;
}
else
{
codecContext = formatContext->streams[audioStream]->codec;
}
// Now that we have information about the codec that the file is using,
// we need to actually open the codec to decode the content
DDLogInfo(@"Attempting to open the codec to playback the media file");
AVCodec *codec;
// Find the decoder for the video stream
codec = avcodec_find_decoder(codecContext->codec_id);
if( codec == NULL )
{
DDLogWarn(@"Unsupported codec! Cannot playback meda file [File:%@]", filePath);
NSString *failureReason = NSLocalizedString(@"Unsupported file format. Cannot playback media.", @"Media playback failed, unsupported codec.");
if ( error != NULL )
{
*error = [NSError errorWithDomain:MediaPlayerErrorDomain
code:UNSUPPORTED_CODEC
userInfo:[NSDictionary dictionaryWithObject:failureReason
forKey:NSLocalizedFailureReasonErrorKey]];
}
return NO; // Codec not found
}
// Open codec
if( avcodec_open2(codecContext, codec, NULL) < 0 )
{
DDLogWarn(@"Unable to open codec! Cannot playback meda file [File:%@]", filePath);
NSString *failureReason = NSLocalizedString(@"Unable to open media codec. Cannot playback media.", @"Media playback failed, cannot open codec.");
if ( error != NULL )
{
*error = [NSError errorWithDomain:MediaPlayerErrorDomain
code:UNABLE_TO_LOAD_CODEC
userInfo:[NSDictionary dictionaryWithObject:failureReason
forKey:NSLocalizedFailureReasonErrorKey]];
}
return NO; // Could not open codec
}
// Allocate player frame
AVFrame *playerFrame=avcodec_alloc_frame();
// Allocate an AVFrame structure
AVFrame *RGBframe=avcodec_alloc_frame();
if( RGBframe==NULL )
{
// could not create a frame to convert our video frame
// to a 16-bit RGB565 frame.
DDLogWarn(@"Unable to convert video frame. Cannot playback meda file [File:%@]", filePath);
NSString *failureReason = NSLocalizedString(@"Problems interpreting video frame information.", @"Media playback failed, cannot convert frame.");
if ( error != NULL )
{
*error = [NSError errorWithDomain:MediaPlayerErrorDomain
code:UNABLE_TO_LOAD_FRAME
userInfo:[NSDictionary dictionaryWithObject:failureReason
forKey:NSLocalizedFailureReasonErrorKey]];
}
return NO; // Could not allocate frame
}
int frameFinished = 0;
AVPacket packet;
// Figure out the destination width/height based on the screen size
int destHeight = codecContext->height;
int destWidth = codecContext->width;
if ( destHeight > SCREEN_HEIGHT || (destWidth > SCREEN_WIDTH) )
{
if ( destWidth > SCREEN_WIDTH )
{
float percentDiff = ( destWidth - SCREEN_WIDTH ) / (float)destWidth;
destWidth = destWidth - (int)(destWidth * percentDiff );
destHeight = destHeight - (int)(destHeight * percentDiff );
}
if ( destHeight > SCREEN_HEIGHT )
{
float percentDiff = (destHeight - SCREEN_HEIGHT ) / (float)destHeight;
destWidth = destWidth - (int)(destWidth * percentDiff );
destHeight = destHeight - (int)(destHeight * percentDiff );
}
}
SwsContext *swsContext = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt, destWidth, destHeight, PIX_FMT_RGB565, SWS_BICUBIC, NULL, NULL, NULL);
while( av_read_frame(formatContext, &packet) >= 0 )
{
// Is this a packet from the video stream?
if( packet.stream_index == videoStream )
{
// Decode video frame
avcodec_decode_video2(codecContext, playerFrame, &frameFinished, &packet);
// Did we get a video frame?
if( frameFinished != 0 )
{
// Convert the content over to RGB565 (16-bit RGB) to playback with SDL
uint8_t *dst[3];
int dstStride[3];
// Set the destination stride
for (int plane = 0; plane < 3; plane++)
{
dstStride[plane] = codecContext->width*2;
dst[plane]= (uint8_t*) malloc(dstStride[plane]*destHeight);
}
sws_scale(swsContext, playerFrame->data,
playerFrame->linesize, 0,
destHeight,
dst, dstStride);
// Create the SDL surface frame that we are going to use to draw our video
// 16-bit RGB so 2 bytes per pixel (pitch = width*(bytes per pixel))
int pitch = destWidth*2;
SDL_Surface *frameSurface = SDL_CreateRGBSurfaceFrom(dst[0], destWidth, destHeight, 16, pitch, 0, 0, 0, 0);
// Clear the old frame first
SDL_RenderClear(renderer);
// Move the frame over to a texture and render it on screen
SDL_Texture *texture = SDL_CreateTextureFromSurface(renderer, frameSurface);
SDL_SetTextureBlendMode(texture, SDL_BLENDMODE_BLEND);
// Draw the new frame on the screen
SDL_RenderPresent(renderer);
SDL_DestroyTexture(texture);
SDL_FreeSurface(frameSurface);
}