
Other articles (8)
-
Adding notes and captions to images
7 February 2011
To add notes and captions to images, the first step is to install the "Légendes" plugin.
Once the plugin is activated, you can configure it in the configuration area to change the rights to create, edit, and delete notes. By default, only site administrators can add notes to images.
Changes when adding a media item
When adding a media item of type "image", a new button appears above the preview (...)
-
Accepted formats
28 January 2010
The following commands provide information about the formats and codecs supported by the local ffmpeg installation:
ffmpeg -codecs
ffmpeg -formats
Accepted input video formats
This list is not exhaustive; it highlights the main formats used:
h264: H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10
m4v: raw MPEG-4 video format
flv: Flash Video (FLV) / Sorenson Spark / Sorenson H.263
Theora
wmv:
Possible output video formats
As a first step, we (...)
-
Videos
21 April 2011
Like documents of type "audio", MediaSPIP displays videos whenever possible using the HTML5 <video> tag.
One drawback of this tag is that it is not recognized correctly by some browsers (Internet Explorer, to name one) and that each browser natively supports only certain video formats.
Its main advantage, however, is native video support in browsers, which makes it possible to do without Flash and (...)
On other sites (2861)
-
Data Privacy in Business: A Risk Leading to Major Opportunities
9 August 2022, by Erin — Privacy
-
How can I take an audio file (mp3) and a video file (mp4) as input in Java Spring?
29 July 2022, by Jawad Un Islam Abir
I am using this code to read an audio file, but I don't know how to take user input.


InputStream inputStream = new FileInputStream("example.ogg");
FFmpegInput input = new FFmpegInput(inputStream);
FFmpegSourceStream stream = input.open(inputFormat);

stream.registerStreams();

AudioSourceSubstream audioSourceSubstream = null;
for (MediaSourceSubstream substream : stream.getSubstreams()) {
    if (substream.getMediaType() != MediaType.AUDIO) continue;
    audioSourceSubstream = (AudioSourceSubstream) substream;
}

if (audioSourceSubstream == null) throw new NullPointerException();
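
The classes in the snippet (FFmpegInput, FFmpegSourceStream, AudioSourceSubstream) look like the ffmpeg4j wrapper, and inputFormat is defined elsewhere in the question. For the Spring part of the question, a common approach is to accept the upload as a MultipartFile and hand its InputStream to the same decoding code. A minimal sketch, assuming Spring Boot's web starter is on the classpath; the controller name and endpoint path are illustrative:

import java.io.IOException;
import java.io.InputStream;

import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

@RestController
public class MediaUploadController {

    // Accepts an audio (mp3) or video (mp4) upload from a multipart form.
    @PostMapping("/upload")
    public ResponseEntity<String> handleUpload(@RequestParam("file") MultipartFile file) throws IOException {
        try (InputStream inputStream = file.getInputStream()) {
            // Hand the stream to the decoding code above,
            // e.g. new FFmpegInput(inputStream) instead of new FileInputStream(...).
            return ResponseEntity.ok("Received " + file.getOriginalFilename());
        }
    }
}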



-
How to minimize latency in an ffmpeg stream in Java?
13 July 2022, by Taavi Sõerd
I need to stream an ffmpeg video feed in Android Studio with minimal latency. The code below achieves that when playing on a Galaxy S21 Ultra, but on a Galaxy Tab playback runs as if in slow motion. When I set the buffer size to 0 I get minimal latency, but I can't actually see the video because it is all corrupted (gray and colored noise).


public class Decode implements Runnable {
public Activity activity;
AVFrame pFrameRGB;
SwsContext sws_ctx;
ByteBuffer bitmapBuffer;
Bitmap bmp;
byte[] array;
int imageViewWidth = 0;
int imageViewHeight = 0;
boolean imageChanged = true;
int v_stream_idx = -1;
int klv_stream_idx = -1;

boolean imageDrawMutex = false;

boolean imageIsSet = false;
ImageView imageView = MainActivity.getmInstanceActivity().findViewById(R.id.imageView);

String mFilename = "udp://@" + MainActivity.connectionIP;
UasDatalinkLocalSet mLatestDls;

public Decode(Activity _activity) {
 this.activity = _activity;
}

public void create_decoder(AVCodecContext codec_ctx) {
 imageChanged = true;

 // Determine required buffer size and allocate buffer
 int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, codec_ctx.width(),
 codec_ctx.height(), 1);
 BytePointer buffer = new BytePointer(av_malloc(numBytes));

 bmp = Bitmap.createBitmap(codec_ctx.width(), codec_ctx.height(), Bitmap.Config.ARGB_8888);

 array = new byte[codec_ctx.width() * codec_ctx.height() * 4];
 bitmapBuffer = ByteBuffer.wrap(array);

 sws_ctx = sws_getContext(
 codec_ctx.width(),
 codec_ctx.height(),
 codec_ctx.pix_fmt(),
 codec_ctx.width(),
 codec_ctx.height(),
 AV_PIX_FMT_RGBA,
 SWS_POINT,
 null,
 null,
 (DoublePointer) null
 );

 if (sws_ctx == null) {
 Log.d("app", "Can not use sws");
 throw new IllegalStateException();
 }

 av_image_fill_arrays(pFrameRGB.data(), pFrameRGB.linesize(),
 buffer, AV_PIX_FMT_RGBA, codec_ctx.width(), codec_ctx.height(), 1);
}

@Override
public void run() {
 Log.d("app", "Start decoder");

 int ret = -1, i = 0;
 String vf_path = mFilename;

 AVFormatContext fmt_ctx = new AVFormatContext(null);
 AVPacket pkt = new AVPacket();


 AVDictionary multicastDict = new AVDictionary();

 av_dict_set(multicastDict, "rtsp_transport", "udp_multicast", 0);

 av_dict_set(multicastDict, "localaddr", getIPAddress(true), 0);
 av_dict_set(multicastDict, "reuse", "1", 0);

 av_dict_set(multicastDict, "buffer_size", "0.115M", 0);
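 // NOTE: "buffer_size" for the udp protocol is a byte count; a plain integer
 // such as "115000" is a safer form than "0.115M".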

 ret = avformat_open_input(fmt_ctx, vf_path, null, multicastDict);
 if (ret < 0) {
 Log.d("app", String.format("Open video file %s failed \n", vf_path));
 byte[] error_message = new byte[1024];
 int elen = av_strerror(ret, error_message, 1024);
 String s = new String(error_message, 0, 20);
 Log.d("app", String.format("Return: %d", ret));
 Log.d("app", String.format("Message: %s", s));
 throw new IllegalStateException();
 }
 
 if (avformat_find_stream_info(fmt_ctx, (PointerPointer) null) < 0) {
 //System.exit(-1);
 Log.d("app", "Stream info not found");
 }


 avformat.av_dump_format(fmt_ctx, 0, mFilename, 0);

 int nstreams = fmt_ctx.nb_streams();

 for (i = 0; i < fmt_ctx.nb_streams(); i++) {
 if (fmt_ctx.streams(i).codecpar().codec_type() == AVMEDIA_TYPE_VIDEO) {
 v_stream_idx = i;
 }
 if (fmt_ctx.streams(i).codecpar().codec_type() == AVMEDIA_TYPE_DATA) {
 klv_stream_idx = i;
 }
 }
 if (v_stream_idx == -1) {
 Log.d("app", "Cannot find video stream");
 throw new IllegalStateException();
 } else {
 Log.d("app", String.format("Video stream %d with resolution %dx%d\n", v_stream_idx,
 fmt_ctx.streams(v_stream_idx).codecpar().width(),
 fmt_ctx.streams(v_stream_idx).codecpar().height()));
 }

 AVCodecContext codec_ctx = avcodec_alloc_context3(null);
 avcodec_parameters_to_context(codec_ctx, fmt_ctx.streams(v_stream_idx).codecpar());


 AVCodec codec = avcodec_find_decoder(codec_ctx.codec_id());


 AVDictionary avDictionary = new AVDictionary();

 av_dict_set(avDictionary, "fflags", "nobuffer", 0);
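 // NOTE: "fflags" is an AVFormatContext option; it has no effect in the
 // dictionary passed to avcodec_open2() below. It belongs with avformat_open_input().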


 if (codec == null) {
 Log.d("app", "Unsupported codec for video file");
 throw new IllegalStateException();
 }
 ret = avcodec_open2(codec_ctx, codec, avDictionary);
 if (ret < 0) {
 Log.d("app", "Can not open codec");
 throw new IllegalStateException();
 }

 AVFrame frm = av_frame_alloc();

 // Allocate an AVFrame structure
 pFrameRGB = av_frame_alloc();
 if (pFrameRGB == null) {
 //System.exit(-1);
 Log.d("app", "unable to init pframergb");
 }

 create_decoder(codec_ctx);

 int width = codec_ctx.width();
 int height = codec_ctx.height();

 double fps = 15;
 

 while (true) {
 try {
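  // NOTE: sleeping on every iteration throttles packet reading; on a slower
  // device this can fall behind a live stream.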
 Thread.sleep(1);
 } catch (Exception e) {

 }

 try {
 if (av_read_frame(fmt_ctx, pkt) >= 0) {
 if (pkt.stream_index() == v_stream_idx) {
 avcodec_send_packet(codec_ctx, pkt);

 if (codec_ctx.width() != width || codec_ctx.height() != height) {
 create_decoder(codec_ctx);
 width = codec_ctx.width();
 height = codec_ctx.height();
 }
 }

 if (pkt.stream_index() == klv_stream_idx) {

 byte[] klvDataBuffer = new byte[pkt.size()];

 for (int j = 0; j < pkt.size(); j++) {
 klvDataBuffer[j] = pkt.data().get(j);
 }

 try {
 KLV k = new KLV(klvDataBuffer, KLV.KeyLength.SixteenBytes, KLV.LengthEncoding.BER);
 byte[] main_payload = k.getValue();

 // decode the Uas Datalink Local Set from main_payload binary blob.
 mLatestDls = new UasDatalinkLocalSet(main_payload);

 if (mLatestDls != null) {

 MainActivity.getmInstanceActivity().runOnUiThread(new Runnable() {
 @RequiresApi(api = Build.VERSION_CODES.Q)
 @Override
 public void run() {
 MainActivity.getmInstanceActivity().updateKlv(mLatestDls);
 }
 });
 }
 } catch (Exception e) {
 e.printStackTrace();
 }
 
 }

 int wasFrameDecoded = 0;
 while (wasFrameDecoded >= 0) {
 wasFrameDecoded = avcodec_receive_frame(codec_ctx, frm);

 if (wasFrameDecoded >= 0) {
 // get clip fps
 fps = 15; //av_q2d(fmt_ctx.streams(v_stream_idx).r_frame_rate());

 sws_scale(
 sws_ctx,
 frm.data(),
 frm.linesize(),
 0,
 codec_ctx.height(),
 pFrameRGB.data(),
 pFrameRGB.linesize()
 );

 if(!imageDrawMutex) {
 MainActivity.getmInstanceActivity().runOnUiThread(new Runnable() {
 @Override
 public void run() {
 if (imageIsSet) {
 imageDrawMutex = true;
 pFrameRGB.data(0).position(0).get(array);
 bitmapBuffer.rewind();
 bmp.copyPixelsFromBuffer(bitmapBuffer);

 if (imageChanged) {
 (imageView).setImageBitmap(bmp);
 imageChanged = false;
 }

 (imageView).invalidate();
 imageDrawMutex = false;
 } else {
 (imageView).setImageBitmap(bmp);
 imageIsSet = true;
 }
 }
 });
 }
 }
 }
 av_packet_unref(pkt);

 }
 } catch (Exception e) {
 e.printStackTrace();
 }

 if (false) {
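  // NOTE: this branch never executes, so frm, codec_ctx and fmt_ctx are never freed.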
 Log.d("threads", "false");

 av_frame_free(frm);

 avcodec_close(codec_ctx);
 avcodec_free_context(codec_ctx);

 avformat_close_input(fmt_ctx);
 }
 }
}



This code runs in Android Studio with Java. I'm quite new to this topic, so I'm not sure where to start.
What could be the cause?
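
Two details in the code above are worth checking first, as flagged in the inline notes: "fflags" = "nobuffer" is an AVFormatContext option, so putting it in the dictionary passed to avcodec_open2() has no effect, and "buffer_size" on the udp protocol is a byte count, where 0 removes the socket buffer entirely, which fits the corruption described. Below is a sketch of the input options usually combined for low latency, assuming the same JavaCPP bindings as the code above; the numeric values are illustrative and need tuning for the actual stream:

AVDictionary inputOpts = new AVDictionary();
// Demuxer-side options: cut input buffering and probing delay.
av_dict_set(inputOpts, "fflags", "nobuffer", 0);      // do not buffer input packets
av_dict_set(inputOpts, "buffer_size", "262144", 0);   // UDP socket buffer in bytes (tune)
av_dict_set(inputOpts, "max_delay", "0", 0);          // do not wait to interleave packets
av_dict_set(inputOpts, "probesize", "32768", 0);      // probe less input before starting
av_dict_set(inputOpts, "analyzeduration", "0", 0);    // skip the stream-analysis delay
int ret = avformat_open_input(fmt_ctx, vf_path, null, inputOpts);

// Decoder-side option: request low-delay output from the codec.
AVDictionary codecOpts = new AVDictionary();
av_dict_set(codecOpts, "flags", "low_delay", 0);
ret = avcodec_open2(codec_ctx, codec, codecOpts);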