
Recherche avancée
Médias (1)
-
Revolution of Open-source and film making towards open film making
6 octobre 2011, par
Mis à jour : Juillet 2013
Langue : English
Type : Texte
Autres articles (75)
-
Emballe médias : à quoi cela sert ?
4 février 2011, par

Ce plugin vise à gérer des sites de mise en ligne de documents de tous types.
Il crée des "médias", à savoir : un "média" est un article au sens SPIP créé automatiquement lors du téléversement d’un document qu’il soit audio, vidéo, image ou textuel ; un seul document ne peut être lié à un article dit "média" ; -
Installation en mode ferme
4 février 2011, par

Le mode ferme permet d’héberger plusieurs sites de type MediaSPIP en n’installant qu’une seule fois son noyau fonctionnel.
C’est la méthode que nous utilisons sur cette même plateforme.
L’utilisation en mode ferme nécessite de connaître un peu le mécanisme de SPIP contrairement à la version standalone qui ne nécessite pas réellement de connaissances spécifiques puisque l’espace privé habituel de SPIP n’est plus utilisé.
Dans un premier temps, vous devez avoir installé les mêmes fichiers que l’installation (...) -
Creating farms of unique websites
13 avril 2011, par

MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
This allows (among other things) : implementation costs to be shared between several different projects / individuals rapid deployment of multiple unique sites creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)
Sur d’autres sites (4497)
-
How to compile FFmpeg for Android with clang in ubuntu18.04 ?
31 juillet 2019, par user10959099

I want to compile FFmpeg with clang as it’s faster, but I don’t know how to do it in Ubuntu.
NDk version r16b ,ffmpeg version 4.1.I did it with gcc. here is my build script,and it works well.
#!/bin/sh
# Build FFmpeg 4.1 for Android (armeabi) with the NDK r16b GCC 4.9 toolchain.
# Works as posted; the duplicated --disable-doc flag has been removed.
NDK=/home/gjy/ndk/android-ndk-r16b-linux-x86_64/android-ndk-r16b
ANDROID_VERSION=19
TOOLCHAIN_VERSION=4.9
BUILD_PLATFORM=linux-x86_64
ARCH=arm
ANDROID_ARCH_ABI=armeabi
HOST=arm-linux-androideabi
CROSS=arm-linux-androideabi
SYSROOT=${NDK}/platforms/android-${ANDROID_VERSION}/arch-${ARCH}/
PREFIX=$(pwd)/android/${ANDROID_VERSION}/$ANDROID_ARCH_ABI
TOOLCHAIN=${NDK}/toolchains/${HOST}-${TOOLCHAIN_VERSION}/prebuilt/${BUILD_PLATFORM}
# Unified headers live under $NDK/sysroot; API level is selected via the macro.
CFLAGS="-Os -fpic -march=armv5te -isysroot $NDK/sysroot -I$NDK/sysroot/usr/include/$CROSS -D__ANDROID_API__=$ANDROID_VERSION -U_FILE_OFFSET_BITS"
CROSS_PREFIX=${TOOLCHAIN}/bin/${CROSS}-

build(){
    echo "configuring $ANDROID_ARCH_ABI ANDROID_VERSION=$ANDROID_VERSION"
    ./configure \
        --prefix=$PREFIX \
        --enable-neon \
        --enable-hwaccels \
        --enable-gpl \
        --enable-postproc \
        --enable-shared \
        --enable-jni \
        --enable-mediacodec \
        --enable-decoder=h264_mediacodec \
        --enable-hwaccel=h264_mediacodec \
        --enable-decoder=hevc_mediacodec \
        --enable-decoder=mpeg4_mediacodec \
        --enable-decoder=vp8_mediacodec \
        --enable-decoder=vp9_mediacodec \
        --disable-static \
        --disable-doc \
        --enable-ffmpeg \
        --disable-ffplay \
        --disable-ffprobe \
        --enable-avdevice \
        --disable-symver \
        --cross-prefix=$CROSS_PREFIX \
        --target-os=android \
        --arch=$ARCH \
        --disable-yasm \
        --enable-cross-compile \
        --sysroot=$SYSROOT \
        --extra-cflags="$CFLAGS " \
        --extra-ldflags="$ADDI_LDFLAGS" \
        $ADDITIONAL_CONFIGURE_FLAG
    make clean
    make -j4
    make install
    echo "$ANDROID_ARCH_ABI installed"
}
build

I wonder how to do this in a clang-friendly way. I’m still new to the NDK and I’ve just started learning, so I have no idea how to do it at all.
When I tried switching to clang, I got many errors.
After trying many times, I updated my build script to the following:

#!/bin/sh
#r16b min support android-14 max android-8.1
# NOTE(review): second attempt — clang/clang++ come from the NDK's llvm
# toolchain, while the binutils (ld/as/ar/strip) come from the GCC 4.9
# toolchain below.
NDK=/home/gjy/ndk/android-ndk-r16b-linux-x86_64/android-ndk-r16b
ANDROID_VERSION=19
TOOLCHAIN_VERSION=4.9
BUILD_PLATFORM=linux-x86_64
ARCH=arm
ANDROID_ARCH_ABI=armeabi
HOST=arm-linux-androideabi
CROSS=arm-linux-androideabi
SYSROOT=${NDK}/platforms/android-${ANDROID_VERSION}/arch-${ARCH}/
PREFIX=$(pwd)/android/${ANDROID_VERSION}/$ANDROID_ARCH_ABI
TOOLCHAIN=${NDK}/toolchains/llvm/prebuilt/${BUILD_PLATFORM}/bin
#LD
LD=${NDK}/toolchains/${HOST}-${TOOLCHAIN_VERSION}/prebuilt/${BUILD_PLATFORM}/bin/${CROSS}-ld
#AS
AS=${NDK}/toolchains/${HOST}-${TOOLCHAIN_VERSION}/prebuilt/${BUILD_PLATFORM}/bin/${CROSS}-as
#AR
AR=${NDK}/toolchains/${HOST}-${TOOLCHAIN_VERSION}/prebuilt/${BUILD_PLATFORM}/bin/${CROSS}-ar
# NOTE(review): -mcpu=$ARCH expands to -mcpu=arm, which is not a valid CPU
# name for the compiler — the first script's -march=armv5te form is correct.
CFLAGS="-mcpu=$ARCH -I$NDK/sysroot/usr/include/$CROSS -D__ANDROID_API__=$ANDROID_VERSION"
CROSS_PREFIX=${NDK}/toolchains/${HOST}-${TOOLCHAIN_VERSION}/prebuilt/${BUILD_PLATFORM}/bin/${CROSS}-
# NOTE(review): issues visible in this configure invocation (not fixed here,
# since this is the script the question asks about):
#   * --cpu=$ANDROID_ARCH_ABI passes the ABI name "armeabi" as a CPU name;
#     this is what ends up in the failing `-mcpu=armeabi` line of config.log.
#   * --enable-x86asm requests x86 assembly in an ARM cross build.
#   * --toolchain=clang-usan selects the undefined-sanitizer preset, which is
#     probably not intended for a release build.
#   * --disable-doc is passed twice.
#   * the --strip= path is missing the /bin/ directory component
#     (…/prebuilt/${BUILD_PLATFORM}/bin/${CROSS}-strip).
build(){
echo "configuring $ANDROID_ARCH_ABI ANDROID_VERSION=$ANDROID_VERSION"
./configure \
--prefix=$PREFIX \
--toolchain=clang-usan \
--cross-prefix=$CROSS_PREFIX \
--enable-neon \
--enable-hwaccels \
--enable-gpl \
--enable-postproc \
--enable-shared \
--disable-static \
--enable-jni \
--enable-mediacodec \
--enable-decoder=h264_mediacodec \
--enable-hwaccel=h264_mediacodec \
--enable-decoder=hevc_mediacodec \
--enable-decoder=mpeg4_mediacodec \
--enable-decoder=vp8_mediacodec \
--enable-decoder=vp9_mediacodec \
--disable-doc \
--enable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--enable-avdevice \
--disable-doc \
--disable-symver \
--target-os=android \
--extra-ldflags="-shared" \
--arch=$ARCH \
--cpu=$ANDROID_ARCH_ABI \
--extra-cflags="-fPIE -fPIC -ffast-math -funroll-loops -mfloat-abi=softfp -mfpu=vfpv3-d16" \
--enable-x86asm \
--enable-cross-compile \
--cc=$TOOLCHAIN/clang \
--cxx=$TOOLCHAIN/clang++ \
--ld=$LD \
--as=$AS \
--ar=$AR \
--strip=${NDK}/toolchains/${HOST}-${TOOLCHAIN_VERSION}/prebuilt/${BUILD_PLATFORM}/${CROSS}-strip \
$ADDITIONAL_CONFIGURE_FLAG
#make clean
#make -j4
#make install
echo "$ANDROID_ARCH_ABI installed"
}
build

Then I get this error from config.log:
/home/gjy/ndk/android-ndk-r16b-linux-x86_64/android-ndk-r16b/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-as -D_ISOC99_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_POSIX_C_SOURCE=200112 -D_XOPEN_SOURCE=600 -DPIC -fPIE -fPIC -ffast-math -funroll-loops -mfloat-abi=softfp -mfpu=vfpv3-d16 -mcpu=armeabi -fPIC -c -o /tmp/ffconf.hfTGr6sO/test.o /tmp/ffconf.hfTGr6sO/test.S
/home/gjy/ndk/android-ndk-r16b-linux-x86_64/android-ndk-r16b/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-as: invalid option -- '_'
GNU assembler not found, install/update gas-preprocessor

It seems the first character of the option was ignored. When I try:
/home/gjy/ndk/android-ndk-r16b-linux-x86_64/android-ndk-r16b/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-as -fPIC -c -o /tmp/ffconf.jHtqLsFE/test.o /tmp/ffconf.jHtqLsFE/test.S
invalid option -- P

Can anybody help find the problem?
-
Can I convert a django video upload from a form using ffmpeg before storing the video ?
5 mai 2014, par GetItDone

I’ve been stuck for weeks trying to use ffmpeg to convert user-uploaded videos to FLV. I use Heroku to host my website, and store my static and media files on Amazon S3 with s3boto. The initial video file uploads fine, however when I retrieve the video and run a celery task (in the same view where the initial video file is uploaded), the new file won’t store on S3. I’ve been trying to get this to work for over a month with no luck, and really no good resources available for learning how to do this, so I figure maybe if I can get the ffmpeg task to run before storing the video I may be able to get it to work. Unfortunately I’m still not very advanced at Python (or Django), so I don’t even know if/how this is possible. Anyone have any ideas? I am willing to use any solution at this point no matter how ugly, as long as it successfully takes video uploads and converts to FLV using ffmpeg, with the resulting file being stored on S3. It doesn’t seem that my situation is very common, because no matter where I look, I cannot find a solution that explains what I should be trying to do. Therefore I will be very appreciative of any guidance. Thanks. My relevant code follows:
#models.py
def content_file_name(instance, filename):
    """Build a unique S3 upload path: videos/<teacher>/remove<uuid>.<ext>.

    The "remove" prefix and uuid4 keep uploads from colliding even when two
    users upload files with the same name.  (Indentation restored — the
    scraped original was flattened and not valid Python.)
    """
    ext = filename.split('.')[-1]
    new_file_name = "remove%s.%s" % (uuid.uuid4(), ext)
    return '/'.join(['videos', instance.teacher.username, new_file_name])
class BroadcastUpload(models.Model):
    """An uploaded video plus the names of its ffmpeg-derived artifacts.

    Indentation restored — the scraped original was flattened and the field
    definitions were no longer class attributes.
    """
    title = models.CharField(max_length=50, verbose_name=_('Title'))
    description = models.TextField(max_length=100, verbose_name=_('Description'))
    teacher = models.ForeignKey(User, null=True, blank=True, related_name='teacher')
    created_date = models.DateTimeField(auto_now_add=True)
    video_upload = models.FileField(upload_to=content_file_name)
    # Filled in asynchronously by the celery tasks after conversion.
    flvfilename = models.CharField(max_length=100, null=True, blank=True)
    videothumbnail = models.CharField(max_length=100, null=True, blank=True)
#tasks.py
@task(name='celeryfiles.tasks.convert_flv')
def convert_flv(video_id):
    """Convert the uploaded video for `video_id` to FLV and record its name.

    On ffmpeg failure the exception is logged and the model is still updated
    with the intended file name (matching the original behavior).
    """
    video = BroadcastUpload.objects.get(pk=video_id)
    print("ID: %s" % video.id)
    print("VIDEO NAME: %s" % video.video_upload.name)
    teacher = video.teacher
    print("TEACHER: %s" % teacher)
    filename = video.video_upload
    sourcefile = "%s%s" % (settings.MEDIA_URL, filename)
    vidfilename = "%s_%s.flv" % (teacher, video.id)
    targetfile = "%svideos/flv/%s" % (settings.MEDIA_URL, vidfilename)
    # BUG FIX: subprocess.call() given one whole command string (without
    # shell=True) raises OSError on Linux; pass an argument list instead.
    # NOTE(review): output goes to a local file in the worker's CWD —
    # targetfile is an http:// URL ffmpeg cannot write to; the converted
    # file must still be uploaded to S3 separately (e.g. default_storage).
    ffmpeg = ["ffmpeg", "-i", sourcefile, vidfilename]
    try:
        ffmpegresult = subprocess.call(ffmpeg)
        print("---------------FFMPEG---------------")
        print("FFMPEGRESULT: %s" % ffmpegresult)
    except Exception as e:
        ffmpegresult = None
        print("Failed to convert video file %s to %s" % (sourcefile, targetfile))
        print(traceback.format_exc())
    video.flvfilename = vidfilename
    video.save()
@task(name='celeryfiles.tasks.ffmpeg_image')
def ffmpeg_image(video_id):
    """Grab a 320x240 PNG thumbnail 2s into the video and record its name."""
    video = BroadcastUpload.objects.get(pk=video_id)
    print("ID: %s" % video.id)
    print("VIDEO NAME: %s" % video.video_upload.name)
    teacher = video.teacher
    print("TEACHER: %s" % teacher)
    filename = video.video_upload
    sourcefile = "%s%s" % (settings.MEDIA_URL, filename)
    imagefilename = "%s_%s.png" % (teacher, video.id)
    # BUG FIX: the original built this string from `thumbnailfilename` itself
    # before it was ever assigned, raising NameError; use imagefilename.
    thumbnailfilename = "%svideos/flv/%s" % (settings.MEDIA_URL, imagefilename)
    # BUG FIX: pass an argument list — a single command string without
    # shell=True fails.  Output goes to a local file; thumbnailfilename is an
    # http:// URL ffmpeg cannot write to (S3 upload must happen separately).
    grabimage = ["ffmpeg", "-y", "-i", sourcefile,
                 "-vframes", "1", "-ss", "00:00:02",
                 "-an", "-vcodec", "png",
                 "-f", "rawvideo", "-s", "320x240",
                 imagefilename]
    try:
        videothumbnail = subprocess.call(grabimage)
        print("---------------IMAGE---------------")
        print("VIDEOTHUMBNAIL: %s" % videothumbnail)
    except Exception as e:
        videothumbnail = None
        print("Failed to convert video file %s to %s" % (sourcefile, thumbnailfilename))
        print(traceback.format_exc())
    video.videothumbnail = imagefilename
    video.save()
#views.py
def upload_broadcast(request):
    """Accept a video upload, then queue the thumbnail and FLV celery tasks.

    Indentation restored — the scraped original was flattened.  An invalid
    POST falls through and re-renders the bound form with its errors.
    """
    if request.method == 'POST':
        form = BroadcastUploadForm(request.POST, request.FILES)
        if form.is_valid():
            upload = form.save()
            video_id = upload.id
            # Fire-and-forget: both conversions run asynchronously in celery.
            image_grab = ffmpeg_image.delay(video_id)
            video_conversion = convert_flv.delay(video_id)
            return HttpResponseRedirect('/current_classes/')
    else:
        form = BroadcastUploadForm(initial={'teacher': request.user,})
    return render_to_response('videos/create_video.html', {'form': form,}, context_instance=RequestContext(request))
#settings.py
# Storage configuration: media and static files both live on S3 (via the
# django-storages s3boto backend), served through a CloudFront domain.
DEFAULT_FILE_STORAGE = 'myapp.s3utils.MediaRootS3BotoStorage'
DEFAULT_S3_PATH = "media"
STATICFILES_STORAGE = 'myapp.s3utils.StaticRootS3BotoStorage'
STATIC_S3_PATH = "static"
AWS_STORAGE_BUCKET_NAME = 'my_bucket'
CLOUDFRONT_DOMAIN = 'domain.cloudfront.net'
# Placeholder credentials — real values must never be committed to source.
AWS_ACCESS_KEY_ID = 'MY_KEY_ID'
AWS_SECRET_ACCESS_KEY = 'MY_SECRET_KEY'
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
# NOTE(review): MEDIA_URL is an http:// CloudFront URL — the celery tasks
# prepend it to build ffmpeg input paths, so ffmpeg reads input over HTTP.
MEDIA_URL = 'http://%s/%s/' % (CLOUDFRONT_DOMAIN, DEFAULT_S3_PATH)
...
#s3utils.py
from storages.backends.s3boto import S3BotoStorage
from django.utils.functional import SimpleLazyObject
# Storage factory pinned to the "static" key prefix; referenced by dotted
# path from STATICFILES_STORAGE in settings.py.
def StaticRootS3BotoStorage():
    return S3BotoStorage(location='static')
MediaRootS3BotoStorage = lambda: S3BotoStorage(location='media')

I can add any other info if needed to help me solve my problem.
-
FFMPEG and STB_Image Create awful Picture
9 février 2023, par murage kibichoI was learning how to use the FFMPEG C api and I was trying to encode a jpeg into a MPEG file. I load the JPEG into (unsigned char *) using the stb-image library. Then I create a (uint8_t *) and copy my rgb values. Finally, I convert RGB to YUV420 using sws_scale. However, a portion of my image blurs out when I perform the encoding.

/
This is the original image



Perhaps I allocate my frame buffer incorrectly ?

ret = av_frame_get_buffer(frame, 0);




This is my entire program


#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include "stb_image_write.h"
#define STB_IMAGE_RESIZE_IMPLEMENTATION
#include "stb_image_resize.h"
#include <assert.h> /* header name was stripped by scraping; assert() is used below — confirm */

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
//gcc stack.c -lm -o stack.o `pkg-config --cflags --libs libavformat libavcodec libswresample libswscale libavutil` && ./stack.o

/*
 * Load "profil.jpeg" with stb_image, convert its packed RGB24 pixels to the
 * frame's YUV420P planes with libswscale, and stamp the frame pts.
 *
 * int i      : pts of the current frame
 * frame      : destination AVFrame (YUV420P, already allocated and writable)
 * height/width : dimensions expected by the encoder context
 */
void PictureToFrame(int i, AVFrame *frame, int height, int width)
{
    const char *fileName = "profil.jpeg";
    int imageHeight = 0;
    int imageWidth = 0;
    int colorChannels = 0;

    unsigned char *image = stbi_load(fileName, &imageWidth, &imageHeight, &colorChannels, 0);
    /* BUG FIX: the original used the result without checking for load failure */
    if (!image) {
        fprintf(stderr, "could not load %s\n", fileName);
        exit(1);
    }

    printf("(height: %d, width: %d)\n", imageHeight, imageWidth);
    assert(colorChannels == 3 && imageHeight == height && imageWidth == width);

    /* stb_image already returns tightly packed 8-bit RGB, so feed it to
     * sws_scale directly.  The original's element-by-element copy into a
     * second heap buffer (and its unused index variable j) were unnecessary;
     * unsigned char and uint8_t are the same width here. */
    const uint8_t *src[1] = { image };
    const int in_linesize[1] = { 3 * imageWidth };
    struct SwsContext *sws_context = sws_getCachedContext(NULL,
        imageWidth, imageHeight, AV_PIX_FMT_RGB24,
        imageWidth, imageHeight, AV_PIX_FMT_YUV420P,
        0, 0, 0, 0);
    sws_scale(sws_context, (const uint8_t *const *)src, in_linesize, 0,
              imageHeight, frame->data, frame->linesize);

    /* Save frame pts */
    frame->pts = i;

    stbi_image_free(image);
    sws_freeContext(sws_context);
}
/* Submit one frame to the encoder (or NULL to flush) and write every packet
 * it produces to outfile.  Errors are reported on stdout and abort the call. */
static void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *outfile)
{
    int rc;

    if (frame)
        printf("Send frame %3"PRId64"\n", frame->pts);

    rc = avcodec_send_frame(enc_ctx, frame);
    if (rc < 0) {
        printf("Error sending a frame for encoding\n");
        return;
    }

    for (;;) {
        rc = avcodec_receive_packet(enc_ctx, pkt);
        if (rc == AVERROR(EAGAIN) || rc == AVERROR_EOF)
            return; /* encoder needs more input, or is fully drained */
        if (rc < 0) {
            printf("Error during encoding\n");
            return;
        }

        printf("Write packet %3"PRId64" (size=%5d)\n", pkt->pts, pkt->size);
        fwrite(pkt->data, 1, pkt->size, outfile);
        av_packet_unref(pkt);
    }
}


/*
 * Demo driver: encodes 25 frames of profil.jpeg into an mpeg1video stream
 * saved as "outo.mp4".
 *
 * NOTE(review): despite the .mp4 name, the output is a raw MPEG-1 video
 * elementary stream, not an MP4 container — no muxer is used.
 */
int main(int argc, char **argv)
{
    const char *filename, *codec_name;
    const AVCodec *codec;
    AVCodecContext *c = NULL;
    int i, ret; /* FIX: dropped unused locals x, y */
    FILE *f;
    AVFrame *frame;
    AVPacket *pkt;
    /* MPEG sequence end code, appended so players see a complete stream. */
    uint8_t endcode[] = { 0, 0, 1, 0xb7 };

    filename = "outo.mp4";
    codec_name = "mpeg1video"; /* alternative: "libx264" */

    /* find the mpeg1video encoder */
    codec = avcodec_find_encoder_by_name(codec_name);
    if (!codec)
    {
        printf("Error finding codec\n");
        return 1; /* BUG FIX: was 0, reporting success on every failure path */
    }

    c = avcodec_alloc_context3(codec);
    if (!c)
    {
        printf("Error allocating c\n");
        return 1;
    }

    pkt = av_packet_alloc();
    if (!pkt)
    {
        printf("Error allocating pkt\n");
        avcodec_free_context(&c);
        return 1;
    }

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = 800;
    c->height = 800;
    /* frames per second */
    c->time_base = (AVRational){1, 25};
    c->framerate = (AVRational){25, 1};
    /* one intra frame every 10 frames, at most one B-frame between refs */
    c->gop_size = 10;
    c->max_b_frames = 1;
    c->pix_fmt = AV_PIX_FMT_YUV420P;

    if (codec->id == AV_CODEC_ID_H264)
    {
        av_opt_set(c->priv_data, "preset", "slow", 0);
    }

    /* open it */
    ret = avcodec_open2(c, codec, NULL);
    if (ret < 0)
    {
        printf("Error opening codec\n");
        goto fail;
    }

    f = fopen(filename, "wb");
    if (!f)
    {
        printf("Error opening file\n");
        goto fail;
    }

    frame = av_frame_alloc();
    if (!frame)
    {
        printf("Error allocating frame\n");
        fclose(f);
        goto fail;
    }
    frame->format = c->pix_fmt;
    frame->width = c->width;
    frame->height = c->height;

    /* allocate the per-plane buffers (default alignment) */
    ret = av_frame_get_buffer(frame, 0);
    if (ret < 0)
    {
        fprintf(stderr, "Could not allocate the video frame data\n");
        exit(1);
    }

    /* encode 25 frames */
    for (i = 0; i < 25; i++)
    {
        /* make sure the frame data is writable */
        ret = av_frame_make_writable(frame);
        if (ret < 0)
        {
            return 1;
        }
        /* fill frame with picture data */
        PictureToFrame(i, frame, c->height, c->width);

        /* encode the image */
        encode(c, frame, pkt, f);
    }

    /* flush the encoder */
    encode(c, NULL, pkt, f);

    /* add sequence end code to have a real MPEG file */
    if (codec->id == AV_CODEC_ID_MPEG1VIDEO || codec->id == AV_CODEC_ID_MPEG2VIDEO)
        fwrite(endcode, 1, sizeof(endcode), f);
    fclose(f);

    avcodec_free_context(&c);
    av_frame_free(&frame);
    av_packet_free(&pkt);

    return 0;

fail:
    /* FIX: free what was allocated before the failure instead of leaking */
    av_packet_free(&pkt);
    avcodec_free_context(&c);
    return 1;
}