
Recherche avancée
Autres articles (101)
-
MediaSPIP 0.1 Beta version
25 avril 2011 — MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all the software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...) -
Multilang : améliorer l’interface pour les blocs multilingues
18 février 2011 — Multilang est un plugin supplémentaire qui n’est pas activé par défaut lors de l’initialisation de MediaSPIP.
Après son activation, une préconfiguration est mise en place automatiquement par MediaSPIP init permettant à la nouvelle fonctionnalité d’être automatiquement opérationnelle. Il n’est donc pas obligatoire de passer par une étape de configuration pour cela. -
MediaSPIP v0.2
21 juin 2013 — MediaSPIP 0.2 est la première version de MediaSPIP stable.
Sa date de sortie officielle est le 21 juin 2013 et est annoncée ici.
Le fichier zip ici présent contient uniquement les sources de MediaSPIP en version standalone.
Comme pour la version précédente, il est nécessaire d’installer manuellement l’ensemble des dépendances logicielles sur le serveur.
Si vous souhaitez utiliser cette archive pour une installation en mode ferme, il vous faudra également procéder à d’autres modifications (...)
Sur d’autres sites (11568)
-
JavaScript MediaSource && ffmpeg chunks
17 mai 2023, par OmriHalifa — I have written the following code for a player that can receive chunks sent by ffmpeg through stdout and display them using MediaSource :


index.js (server of this request)


const express = require('express')
const app = express()
const port = 4545
const cp = require('child_process')
const cors = require('cors')
const { Readable } = require('stream');



app.use(cors())

// GET /startRecording — spawn ffmpeg capturing the webcam and stream its
// MPEG-TS output to the HTTP response chunk by chunk as stdout data arrives.
app.get('/startRecording', (req, res) => {
  const ffmpeg = cp.spawn('ffmpeg', ['-f', 'dshow', '-i', 'video=HP Wide Vision HD Camera', '-profile:v', 'high', '-pix_fmt', 'yuvj420p', '-level:v', '4.1', '-preset', 'ultrafast', '-tune', 'zerolatency', '-vcodec', 'libx264', '-r', '10', '-b:v', '512k', '-s', '640x360', '-acodec', 'aac', '-ac', '2', '-ab', '32k', '-ar', '44100', '-f', 'mpegts', '-flush_packets', '0', '-' /*'udp://235.235.235.235:12345?pkt_size=1316'*/ ]);

  // Forward every encoded chunk straight to the client.
  ffmpeg.stdout.on('data', (data) => {
    res.write(data);
  });

  // ffmpeg writes its diagnostics to stderr as plain text; a toString is
  // enough — the original round-trip through a Readable stream was a no-op.
  ffmpeg.stderr.on('data', (data) => {
    console.log(data.toString('utf8'));
  });

  // Spawn failure (e.g. ffmpeg not on PATH) would otherwise be an unhandled
  // 'error' event that crashes the server.
  ffmpeg.on('error', (err) => {
    console.error('failed to start ffmpeg:', err);
    res.end();
  });

  ffmpeg.on('close', (code) => {
    console.log(`child process exited with code ${code}`);
    res.end(); // terminate the HTTP response once ffmpeg stops producing data
  });

  // If the client disconnects, stop capturing so we don't leak one ffmpeg
  // process per abandoned request.
  req.on('close', () => {
    ffmpeg.kill('SIGTERM');
  });
});

app.listen(port, () => {
  console.log(`Video's Server listening on port ${port}`);
});



App.js (In react, the side of the player) :


import { useEffect } from 'react';

function App() {
 async function transcode() {
 const mediaSource = new MediaSource();
 const videoElement = document.getElementById('videoElement');
 videoElement.src = URL.createObjectURL(mediaSource);
 
 
 mediaSource.addEventListener('sourceopen', async () => {
 console.log('MediaSource open');
 const sourceBuffer = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.42c01e"');
 try {
 const response = await fetch('http://localhost:4545/startRecording');
 const reader = response.body.getReader();
 
 reader.read().then(async function processText({ done, value }) {
 if (done) {
 console.log('Stream complete');
 return;
 }

 console.log("B4 append", videoElement)
 await sourceBuffer.appendBuffer(value);
 console.log("after append",value);
 // Display the contents of the sourceBuffer
 sourceBuffer.addEventListener('updateend', function(e) { if (!sourceBuffer.updating && mediaSource.readyState === 'open') { mediaSource.endOfStream(); } });
 
 // Call next read and repeat the process
 return reader.read().then(processText);
 });
 } catch (error) {
 console.error(error);
 }
 });

 console.log("B4 play")
 await videoElement.play();
 console.log("after play")

 }
 
 
 useEffect(() => {}, []);

 return (
 <div classname="App">
 <div>
 <video></video>
 </div>
 <button>start streaming</button>
 </div>
 );
}

export default App;




this what i get :
what i get


The chunks are being received and passed to the Uint8Array correctly, but the video is not being displayed. What could be the cause of this, and how can I correct it?


-
How to properly pipe adb screenrecord (h264 stream) to ffplay in a WinForms app ?
23 avril 2022, par GeneralFuzz — How to pipe ADB's exec-out to ffplay ?


I have been struggling to get this "Live view" C# WinForms app working properly this last week. The goal is to have the android screen in the native app window where I then have other controls implemented as an overlay.


I am able to live stream by piping adb's screen record H264 into FFplay via CMD.
A CMD process that launches a .BAT does work, but I can't manipulate FFplay, as control over it seems to be lost with how it's launched (correct me if I'm wrong).
I just need a programmatic version of this where I can then control the FFplay window to merge it as a child into my form.


adb exec-out screenrecord --output-format=h264 - | ffplay -window_title "Live View" -framerate 60 -framedrop -probesize 32 -sync video -



I also attempted creating a ADB and FFplay process, manually trying to write the standard in from ADB's standard out. The standard out was received but I couldn't figure out writing to ffplay correctly. May have had a same thread deadlock issue.


// Configure the ffplay process, start it, and re-parent its window into this
// form. NOTE(review): this excerpt starts mid-method and references
// ffplayProcess / adbProcess / SetParent / MoveWindow declared outside the
// fragment shown here.
 //ffplayProcess.SynchronizingObject();
 ffplayProcess.OutputDataReceived += (o, ev) => Debug.WriteLine(ev.Data ?? "NULL", "ffplay");
 ffplayProcess.ErrorDataReceived += (o, ev) => Debug.WriteLine(ev.Data ?? "NULL", "ffplay");
 ffplayProcess.Exited += (o, ev) => Debug.WriteLine("Exited", "ffplay");
 try
 {
 ffplayProcess.Start();
 }
 catch (Exception err)
 {
 MessageBox.Show($"Failed to start livestream. {err.Message}", "Live Stream Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
 return;
 }

 // Wait for the process to start, then...
 // NOTE(review): a fixed 200 ms Sleep is racy — ffplay may not have created
 // its window yet, in which case MainWindowHandle below is IntPtr.Zero and
 // the re-parenting silently fails.
 System.Threading.Thread.Sleep(200);

 //Run only if ffplay has not exited
 if (ffplayProcess.HasExited == false)
 {
 // make 'this' the parent of ffmpeg (presuming you are in scope of a Form or Control)
 SetParent(ffplayProcess.MainWindowHandle, this.Handle);
 MoveWindow(ffplayProcess.MainWindowHandle, 0, 0, 240, 320, true);
 }

 // NOTE(review): OutputDataReceived is a *text* API — the runtime decodes
 // stdout with the console encoding and strips line terminators, so binary
 // H.264 data is already corrupted before this handler runs. Copying
 // adbProcess.StandardOutput.BaseStream directly into
 // ffplayProcess.StandardInput.BaseStream would avoid the text round-trip.
 adbProcess.OutputDataReceived += (o, ev) => {
 Debug.WriteLine(ev.Data ?? "NULL", "adb");

 // NOTE(review): this condition is always true — `x != "NULL" || x != null`
 // cannot be false for any x; presumably `ev.Data != null` alone was meant.
 if (ev.Data != "NULL" || ev.Data != null)
 {
 //Convert data to byte array
 //byte[] dataBytes = Encoding.ASCII.GetBytes(ev.Data);
 // NOTE(review): re-encoding the decoded text as UTF-8 does not recover the
 // original bytes; WriteAsync/FlushAsync are also fire-and-forget here, so
 // writes can interleave or be lost.
 byte[] dataBytes = Encoding.UTF8.GetBytes(ev.Data);
 ffplayProcess.StandardInput.BaseStream.WriteAsync(dataBytes, 0, dataBytes.Length);
 ffplayProcess.StandardInput.BaseStream.FlushAsync();
 }
 };
 adbProcess.ErrorDataReceived += (o, ev) => Debug.WriteLine(ev.Data ?? "NULL", "adb");
 adbProcess.Exited += (o, ev) => Debug.WriteLine("Exited", "adb");

 adbProcess.Start();

 adbProcess.BeginOutputReadLine();
 adbProcess.BeginErrorReadLine();



My current attempt is using MedallionShell to pipe into the FFplay process. ADB and FFPlay launch, but I never get FFplay's video out window.


// Launch `adb exec-out screenrecord` and pipe its raw H.264 stdout into
// ffplay via MedallionShell.
private void FormLiveView_Load(object sender, EventArgs e)
{
    // Each command-line argument must be its own array element. The original
    // passed one space-joined string, which MedallionShell escapes as a
    // SINGLE argument — so adb and ffplay received a garbled argv and ffplay
    // never opened its video window.
    var command = Medallion.Shell.Command.Run(
        tmpPath + "/adb.exe",
        new[] { "exec-out", "screenrecord", "--output-format=h264", "-" },
        options => { options.DisposeOnExit(false); });

    command.PipeTo(Medallion.Shell.Command.Run(
        tmpPath + "/ffplay.exe",
        new[] { "-framerate", "60", "-framedrop", "-probesize", "32", "-sync", "video", "-" },
        options => { options.DisposeOnExit(false); }));
}



-
Bluebird promise, promise.each only executes once
20 mars 2017, par kenpeter — There is a function called musicPromise(). What this function does is
- It gets all mp4 files and loop through it.
- then it tries to convert each mp4 to mp3, using
fluent-ffmpeg
The problem I am facing is
-
It only converts 1 file, no matter how many mp4 files I have.
-
And it seems never reach to
proc.on('end', (x) => {
Full code here :
// search
const glob = require('glob');
// wait for
const Promise = require('bluebird');
// fs
const fs = require('fs');
// mp3
const ffmpeg = require('fluent-ffmpeg');
// video source file path
const videoPath = '/home/kenpeter/Videos/4K\ Video\ Downloader';
// audio source file path
const audioPath = __dirname + "/audio";
// child process, exec
const exec = require('child_process').exec;
// now rename promise
// Rename every .mp4 under videoPath, replacing characters that are awkward on
// a command line with underscores. Resolves once every file has been handled.
function renamePromise() {
  return new Promise((resolve, reject) => {
    glob(videoPath + "/**/*.mp4", (er, files) => {
      Promise.each(files, (singleClipFile) => {
        return new Promise((resolve1, reject1) => {
          // Take the basename and sanitise it.
          const parts = singleClipFile.split("/");
          const baseName = parts[parts.length - 1];
          const safeName = baseName.replace(/[&\/\\#,+()$~%'":*?<>{}\ ]/g, "_");
          const target = videoPath + "/" + safeName;
          // Rename in place; errors are logged but do not abort the batch.
          fs.rename(singleClipFile, target, function (err) {
            if (err) console.log('ERROR: ' + err);
            console.log("-- Rename one file --");
            console.log(target);
            resolve1();
          });
        });
      }).then(() => {
        console.log('--- rename all files done ---');
        resolve();
      });
    });
  });
};
// music promise
// Convert every .mp4 under videoPath to an .mp3 in ./audio via fluent-ffmpeg,
// one file at a time. Resolves when ALL conversions have finished.
//
// BUG FIX: the original body was `new Promise(...)` with no `return`, so
// musicPromise() returned undefined. The `.then(musicPromise)` chain therefore
// continued immediately, reached `process.exit(0)` and killed the process
// mid-conversion — which is why only one file ever converted and `proc.on('end')`
// seemed never to fire.
function musicPromise() {
  return new Promise((resolve, reject) => {
    glob(videoPath + "/**/*.mp4", (er, files) => {
      Promise.each(files, (singleClipFile) => {
        return new Promise((resolve1, reject1) => {
          console.log('-- music promise --');
          console.log(singleClipFile);
          // e.g. /path/to/xxxx.mp4 -> xxxx.mp3
          let arr = singleClipFile.split("/");
          let clipFile = arr[arr.length - 1];
          let fileName = clipFile.replace(/\.[^/.]+$/, "");
          let musicFile = fileName + '.mp3';
          // Configure the conversion.
          let proc = new ffmpeg({source: singleClipFile});
          proc.setFfmpegPath('/usr/bin/ffmpeg');
          proc.output("./audio/" + musicFile);
          // Reject on failure so the batch does not hang forever on a bad file.
          proc.on('error', (err) => {
            console.log(err);
            reject1(err);
          });
          proc.on('end', (x) => {
            console.log("single mp3 done!");
            console.log(x);
            resolve1();
          });
          proc.run();
        });
      })
      .then(() => {
        console.log('--------- all mp3 conversion done --------');
        resolve();
      })
      // Propagate a per-file failure to the caller instead of leaving the
      // outer promise pending.
      .catch(reject);
    });
  });
};
// adb kill
// Run `adb kill-server`; resolves on success, rejects on failure.
// BUG FIX: the original logged the error and bare-returned, leaving the
// promise pending forever and silently hanging the whole chain.
function adbKillPromise() {
  return new Promise((resolve, reject) => {
    exec("adb kill-server", (err, stdout, stderr) => {
      if (err) {
        console.error(err);
        reject(err);
        return;
      }
      console.log(stdout);
      console.log('---adb kill---');
      resolve();
    });
  });
};
// adb start
// Run `adb start-server`; resolves on success, rejects on failure.
// BUG FIX: as with adbKillPromise, the original never settled the promise on
// error, so the pipeline would hang with no diagnostic.
function adbStartPromise() {
  return new Promise((resolve, reject) => {
    exec("adb start-server", (err, stdout, stderr) => {
      if (err) {
        console.error(err);
        reject(err);
        return;
      }
      console.log(stdout);
      console.log('---adb start---');
      resolve();
    });
  });
};
// adb push promise
// Push every generated .mp3 under audioPath to the device's /sdcard/Music,
// one `adb push` at a time. Each push is best-effort: the inner promise
// resolves whether or not the command succeeded, matching the original.
function adbPushPromise() {
  return new Promise((resolve, reject) => {
    glob(audioPath + "/**/*.mp3", (er, files) => {
      Promise.each(files, (singleMusicFile) => {
        return new Promise((resolve1, reject1) => {
          const cmd = "adb push" + " " + singleMusicFile + " " + "/sdcard/Music";
          exec(cmd, (err, stdout, stderr) => {
            console.log(cmd);
            resolve1();
          });
        });
      }).then(() => {
        console.log('---- done push all music ---');
        resolve();
      });
    });
  });
};
// Run the full pipeline sequentially: sanitise the clip filenames, convert
// them to mp3, restart the adb server, then push the audio to the device.
// Exit code 0 on success, 1 on any failure caught by the chain.
renamePromise()
.then(musicPromise)
.then(adbKillPromise)
.then(adbStartPromise)
.then(adbPushPromise)
.then(() => {
console.log('---- all done----');
process.exit(0);
})
.catch(err => {
console.log('Error', err);
process.exit(1);
});