
Other articles (48)
-
Participate in its translation
10 April 2011
You can help us improve the wording used in the software, or translate it into any new language so that it can reach new linguistic communities.
To do this, SPIP's translation interface is used, where all of MediaSPIP's language modules are available. You simply need to subscribe to the translators' mailing list to ask for more information.
At the moment, MediaSPIP is only available in French and (...)
-
The SPIPmotion queue
28 November 2010
A queue stored in the database
When it is installed, SPIPmotion creates a new table in the database, named spip_spipmotion_attentes.
This new table is made up of the following fields: id_spipmotion_attente, the unique numeric identifier of the task to be processed; id_document, the numeric identifier of the original document to encode; id_objet, the unique identifier of the object to which the encoded document should automatically be attached; objet, the type of object to which (...)
-
No talk of markets, the cloud, etc.
10 April 2011
The vocabulary used on this site tries to avoid any reference to the fashions that flourish so freely across web 2.0 and among the companies that live off them.
You are therefore invited to avoid using terms such as "Brand", "Cloud", "Market", etc.
Our motivation is above all to create a simple tool, accessible to everyone, that encourages the sharing of creative work on the Internet and allows authors to keep as much autonomy as possible.
No "Gold or Premium contract" is therefore planned, no (...)
On other sites (5898)
-
How to convert ffmpeg video frame to YUV444?
21 October 2019, by Edward Severinsen
I have been following a tutorial on how to use ffmpeg and SDL to make a simple video player with no audio (yet). While looking through the tutorial I realized it was out of date and many of the functions it used, for both ffmpeg and SDL, were deprecated. So I searched for an up-to-date solution and found an answer to a Stack Overflow question that filled in what the tutorial was missing.
However, it uses YUV420, which is of lower quality. I want to implement YUV444, and after studying chroma subsampling for a bit and looking at the different YUV formats I am confused as to how to implement it. From what I understand, YUV420 carries a quarter of the chroma information of YUV444. YUV444 means every pixel has its own chroma sample, and as such is more detailed, while YUV420 means pixels are grouped together and share a chroma sample, and is therefore less detailed.
And from what I understand, the different YUV formats (420, 422, 444) differ in the way they order and group the Y, U, and V samples. All of this is a bit overwhelming because I haven't done much with codecs, conversions, etc. Any help would be much appreciated, and if additional info is needed please let me know before downvoting.
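To put numbers on that comparison, here is a small illustrative sketch (not from the question) of the plane sizes an 8-bit planar frame needs in each format: 4:2:0 stores one U and one V sample per 2x2 block of pixels, while 4:4:4 stores one of each per pixel.

#include <stdio.h>

int main(void) {
    /* Illustrative only: byte counts for an 8-bit planar 1920x1080 frame. */
    int w = 1920, h = 1080;
    printf("YUV420P: Y=%d U=%d V=%d (1.5 bytes per pixel)\n",
           w * h, (w / 2) * (h / 2), (w / 2) * (h / 2));
    printf("YUV444P: Y=%d U=%d V=%d (3 bytes per pixel)\n",
           w * h, w * h, w * h);
    return 0;
}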
Here is the code from the answer I mentioned concerning the conversion to YUV420:
texture = SDL_CreateTexture(
    renderer,
    SDL_PIXELFORMAT_YV12,
    SDL_TEXTUREACCESS_STREAMING,
    pCodecCtx->width,
    pCodecCtx->height
);
if (!texture) {
    fprintf(stderr, "SDL: could not create texture - exiting\n");
    exit(1);
}

// initialize SWS context for software scaling
sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                         pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                         AV_PIX_FMT_YUV420P,
                         SWS_BILINEAR,
                         NULL,
                         NULL,
                         NULL);

// set up YV12 pixel array (12 bits per pixel)
yPlaneSz = pCodecCtx->width * pCodecCtx->height;
uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4;
yPlane = (Uint8*)malloc(yPlaneSz);
uPlane = (Uint8*)malloc(uvPlaneSz);
vPlane = (Uint8*)malloc(uvPlaneSz);
if (!yPlane || !uPlane || !vPlane) {
    fprintf(stderr, "Could not allocate pixel buffers - exiting\n");
    exit(1);
}

uvPitch = pCodecCtx->width / 2;
while (av_read_frame(pFormatCtx, &packet) >= 0) {
    // Is this a packet from the video stream?
    if (packet.stream_index == videoStream) {
        // Decode video frame
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

        // Did we get a video frame?
        if (frameFinished) {
            AVPicture pict;
            pict.data[0] = yPlane;
            pict.data[1] = uPlane;
            pict.data[2] = vPlane;
            pict.linesize[0] = pCodecCtx->width;
            pict.linesize[1] = uvPitch;
            pict.linesize[2] = uvPitch;

            // Convert the image into YUV format that SDL uses
            sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                      pFrame->linesize, 0, pCodecCtx->height, pict.data,
                      pict.linesize);

            SDL_UpdateYUVTexture(
                texture,
                NULL,
                yPlane,
                pCodecCtx->width,
                uPlane,
                uvPitch,
                vPlane,
                uvPitch
            );

            SDL_RenderClear(renderer);
            SDL_RenderCopy(renderer, texture, NULL, NULL);
            SDL_RenderPresent(renderer);
        }
    }

    // Free the packet that was allocated by av_read_frame
    av_free_packet(&packet);

    SDL_PollEvent(&event);
    switch (event.type) {
    case SDL_QUIT:
        SDL_DestroyTexture(texture);
        SDL_DestroyRenderer(renderer);
        SDL_DestroyWindow(screen);
        SDL_Quit();
        exit(0);
        break;
    default:
        break;
    }
}

// Free the YUV frame
av_frame_free(&pFrame);
free(yPlane);
free(uPlane);
free(vPlane);

// Close the codec
avcodec_close(pCodecCtx);
avcodec_close(pCodecCtxOrig);

// Close the video file
avformat_close_input(&pFormatCtx);

EDIT:
After more research I learned that YUV420 is stored with all of the Y bytes first, then the U and V bytes one after another, as illustrated by this image:

[image: planar YUV420 memory layout (source: wikimedia.org)]

However, I also learned that YUV444 is stored in the order U, Y, V, repeating, as this picture shows:

[image: packed YUV444 (U, Y, V) memory layout]
I tried changing some things around in the code:
// I changed SDL_PIXELFORMAT_YV12 to SDL_PIXELFORMAT_UYVY
// so as to reflect the order of YUV444
texture = SDL_CreateTexture(
    renderer,
    SDL_PIXELFORMAT_UYVY,
    SDL_TEXTUREACCESS_STREAMING,
    pCodecCtx->width,
    pCodecCtx->height
);
if (!texture) {
    fprintf(stderr, "SDL: could not create texture - exiting\n");
    exit(1);
}

// Changed AV_PIX_FMT_YUV420P to AV_PIX_FMT_YUV444P
// for rather obvious reasons
sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                         pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                         AV_PIX_FMT_YUV444P,
                         SWS_BILINEAR,
                         NULL,
                         NULL,
                         NULL);

// There are as many Y, U and V bytes as pixels, so I just
// made yPlaneSz and uvPlaneSz equal to the number of pixels
yPlaneSz = pCodecCtx->width * pCodecCtx->height;
uvPlaneSz = pCodecCtx->width * pCodecCtx->height;
yPlane = (Uint8*)malloc(yPlaneSz);
uPlane = (Uint8*)malloc(uvPlaneSz);
vPlane = (Uint8*)malloc(uvPlaneSz);
if (!yPlane || !uPlane || !vPlane) {
    fprintf(stderr, "Could not allocate pixel buffers - exiting\n");
    exit(1);
}

uvPitch = pCodecCtx->width * 2;
while (av_read_frame(pFormatCtx, &packet) >= 0) {
    // Is this a packet from the video stream?
    if (packet.stream_index == videoStream) {
        // Decode video frame
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

        // Rearranged the order of the planes to reflect UYV order,
        // then set linesize to the number of Y, U and V bytes per row
        if (frameFinished) {
            AVPicture pict;
            pict.data[0] = uPlane;
            pict.data[1] = yPlane;
            pict.data[2] = vPlane;
            pict.linesize[0] = pCodecCtx->width;
            pict.linesize[1] = pCodecCtx->width;
            pict.linesize[2] = pCodecCtx->width;

            // Convert the image into YUV format that SDL uses
            sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                      pFrame->linesize, 0, pCodecCtx->height, pict.data,
                      pict.linesize);

            SDL_UpdateYUVTexture(
                texture,
                NULL,
                yPlane,
                1,
                uPlane,
                uvPitch,
                vPlane,
                uvPitch
            );
            // .................................................

But now I get an access violation at the call to SDL_UpdateYUVTexture. I'm honestly not sure what's wrong. I think it may have to do with setting AVPicture pict's members data and linesize improperly, but I'm not positive.
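For what it's worth, a possible way around this, sketched under assumptions rather than taken from this thread: SDL2 exposes no planar 4:4:4 YUV texture format, and SDL_UpdateYUVTexture is intended for planar YV12/IYUV textures, so calling it on a packed UYVY texture (and with a Y pitch of 1) is a plausible source of the access violation. To display full-resolution chroma, swscale can emit packed RGB instead, uploaded with SDL_UpdateTexture. Variable names (renderer, pCodecCtx, pFrame, texture, sws_ctx) follow the question's code; rgbBuf, rgbPitch, dstData and dstLinesize are introduced here for illustration.

// Sketch: convert decoded frames to packed RGB24 and upload with
// SDL_UpdateTexture, keeping one chroma sample per pixel end to end.
texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGB24,
                            SDL_TEXTUREACCESS_STREAMING,
                            pCodecCtx->width, pCodecCtx->height);

sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                         pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24,
                         SWS_BILINEAR, NULL, NULL, NULL);

int rgbPitch = pCodecCtx->width * 3;                     // 3 bytes per pixel
uint8_t *rgbBuf = malloc((size_t)rgbPitch * pCodecCtx->height);
uint8_t *dstData[1] = { rgbBuf };
int dstLinesize[1] = { rgbPitch };

// Inside the decode loop, once frameFinished is set:
sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize,
          0, pCodecCtx->height, dstData, dstLinesize);
SDL_UpdateTexture(texture, NULL, rgbBuf, rgbPitch);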
-
Using Accord.Video.FFMPEG, I get a "parameter is not valid" exception. How can I solve it?
31 May 2023, by Sheron Blumental
I want to extract all the frames from an MP4 video file and display them on a PictureBox.


The original code comes from this Q&A: How can I time the presentation and extraction of frames from a video file?


The exception happens after clicking the start button, on the line:


var frame = videoReader.ReadVideoFrame();



The message


System.ArgumentException
 HResult=0x80070057
 Message=Parameter is not valid.
 Source=System.Drawing
 StackTrace:
 at System.Drawing.Bitmap..ctor(Int32 width, Int32 height, PixelFormat format)
 at Accord.Video.FFMPEG.VideoFileReader.DecodeVideoFrame(BitmapData bitmapData)
 at Accord.Video.FFMPEG.VideoFileReader.readVideoFrame(Int32 frameIndex, BitmapData output)
 at Accord.Video.FFMPEG.VideoFileReader.ReadVideoFrame()
 at Extract_Frames.Form1.<GetVideoFramesAsync>d__15.MoveNext() in D:\Csharp Projects\Extract Frames\Form1.cs:line 114
 at System.Runtime.CompilerServices.TaskAwaiter.ThrowForNonSuccess(Task task)
 at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification(Task task)
 at System.Runtime.CompilerServices.TaskAwaiter.GetResult()
 at Extract_Frames.Form1.d__17.MoveNext() in D:\Csharp Projects\Extract Frames\Form1.cs:line 151


The full code


using Accord.IO;
using Accord.Video;
using Accord.Video.FFMPEG;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;

namespace Extract_Frames
{
 public partial class Form1 : Form
 {
 Bitmap frame = null;
 Graphics frameGraphics = null;
 bool isVideoRunning = false;
 IProgress<Bitmap> videoProgress = null;
 private CancellationTokenSource cts = null;
 private readonly object syncRoot = new object();
 private static long pause = 0;
 private int frameRate = 0;
 private List<Bitmap> frames = new List<Bitmap>();
 string fileName;

 public Form1()
 {
 InitializeComponent();

 }

 private void Form1_Load(object sender, EventArgs e)
 {

 }

 private void StopPlayback(bool cancel)
 {
 lock (syncRoot)
 {
 if (cancel) cts?.Cancel();
 cts?.Dispose();
 cts = null;
 }
 }

 int counter = 1;
 private void Updater(Bitmap videoFrame)
 {
 frames.Add(videoFrame);

 label1.Text = "Current Frame Number : " + counter;
 trackBar1.Value = counter;
 counter++;

 //Size size = new Size(videoFrame.Width, videoFrame.Height);
 //pictureBox1.ClientSize = size;
 using (videoFrame) frameGraphics.DrawImage(videoFrame, Point.Empty);

 pictureBox1.Invalidate();
 }

 private async Task GetVideoFramesAsync(IProgress<Bitmap> updater, string fileName, int intervalMs, CancellationToken token = default)
 {
 using (var videoReader = new VideoFileReader())
 {
 if (token.IsCancellationRequested) return;
 videoReader.Open(fileName);

 videoReader.ReadVideoFrame(1);
 trackBar1.Value = 1;

 label1.Text = "Current Frame Number : " + counter.ToString();

 while (true)
 {
 if (Interlocked.Read(ref pause) == 0)
 {
 var frame = videoReader.ReadVideoFrame();

 if (token.IsCancellationRequested || frame is null) break;
 updater.Report(frame);
 }
 await Task.Delay(frameRate).ConfigureAwait(false);
 }
 }
 }

 private void trackBar2_Scroll(object sender, EventArgs e)
 {
 frameRate = trackBar2.Value / 25;
 }

 private async void buttonStart_Click(object sender, EventArgs e)
 {
 string fileName = textBox1.Text;

 if (isVideoRunning) return;
 isVideoRunning = true;

 using (var videoReader = new VideoFileReader())
 {
 videoReader.Open(fileName);
 frame = new Bitmap(videoReader.Width + 2, videoReader.Height + 2);
 trackBar1.Maximum = (int)videoReader.FrameCount;
 }

  videoProgress = new Progress<Bitmap>((bitmap) => Updater(bitmap));
 cts = new CancellationTokenSource();
 pictureBox1.Image = frame;
 try
 {
 frameGraphics = Graphics.FromImage(frame);
  // Set the frame rate to 25 frames per second
 //int frameRate = 1000 / 25;
 await GetVideoFramesAsync(videoProgress, fileName, frameRate, cts.Token);
 }
 finally
 {
 frameGraphics?.Dispose();
 StopPlayback(false);
 isVideoRunning = false;
 }
 }

 private void buttonPause_Click(object sender, EventArgs e)
 {
 if (pause == 0)
 {
 buttonPause.Text = "Resume";
 Interlocked.Increment(ref pause);
 }
 else
 {
 Interlocked.Decrement(ref pause);
 buttonPause.Text = "Pause";
 }
 }

 private void buttonStop_Click(object sender, EventArgs e)
 {
 StopPlayback(true);
 }

 protected override void OnFormClosing(FormClosingEventArgs e)
 {
 if (isVideoRunning) StopPlayback(true);
 pictureBox1.Image?.Dispose();
 base.OnFormClosing(e);
 }

 private void pictureBox1_Paint(object sender, PaintEventArgs e)
 {
 ControlPaint.DrawBorder(e.Graphics, pictureBox1.ClientRectangle, Color.Red, ButtonBorderStyle.Solid);
 }

 private void trackBar1_Scroll(object sender, EventArgs e)
 {
 pictureBox1.Image = frames[trackBar1.Value];
 }

 private void button1_Click(object sender, EventArgs e)
 {
 using (OpenFileDialog openFileDialog = new OpenFileDialog())
 {
 openFileDialog.InitialDirectory = "c:\\";
 openFileDialog.Filter = "video files (*.mp4)|*.mp4|All files (*.*)|*.*";
 openFileDialog.FilterIndex = 2;
 openFileDialog.RestoreDirectory = true;

 if (openFileDialog.ShowDialog() == DialogResult.OK)
 {
 // Get the path of specified file
 textBox1.Text = openFileDialog.FileName;
 }
 }
 }
 }
}
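For reference, a hedged sketch of a more defensive read loop (a hypothetical helper, not the asker's code). It rests on two assumptions: that Accord's VideoFileReader can fail inside ReadVideoFrame when asked to read past the last frame, and that Updater above disposes each frame after drawing it even though the same Bitmap was added to frames, leaving the list holding disposed images (assigning one to pictureBox1.Image later also raises "Parameter is not valid"). Bounding the loop by FrameCount and storing clones sidesteps both.

using System.Collections.Generic;
using System.Drawing;
using Accord.Video.FFMPEG;

// Hypothetical helper, not from the question: read every frame while
// (a) never calling ReadVideoFrame beyond FrameCount and (b) storing
// clones, so the returned list never contains disposed bitmaps.
static List<Bitmap> ReadAllFrames(string fileName)
{
    var result = new List<Bitmap>();
    using (var reader = new VideoFileReader())
    {
        reader.Open(fileName);
        for (long i = 0; i < reader.FrameCount; i++)
        {
            using (Bitmap frame = reader.ReadVideoFrame())
            {
                if (frame == null) break;          // container under-reported FrameCount
                result.Add((Bitmap)frame.Clone()); // independent copy for the UI
            }
        }
    }
    return result;
}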


-
Today we celebrate Data Privacy Day 2019
28 January 2019, by Jake Thornton — Privacy