
Media (91)
-
Les Miserables
9 December 2019
Updated: December 2019
Language: French
Type: Text
-
VideoHandle
8 November 2019
Updated: November 2019
Language: French
Type: Video
-
Somos millones 1
21 July 2014
Updated: June 2015
Language: French
Type: Video
-
Un test - mauritanie
3 April 2014
Updated: April 2014
Language: French
Type: Text
-
Pourquoi Obama lit il mes mails ?
4 February 2014
Updated: February 2014
Language: French
-
IMG 0222
6 October 2013
Updated: October 2013
Language: French
Type: Image
Other articles (94)
-
Improving the base version
13 September 2013
Nicer multiple selection
The Chosen plugin improves the usability of multiple-selection fields. See the following two images for a comparison.
To do so, simply activate the Chosen plugin (General site configuration > Plugin management), then configure it (Templates > Chosen) by enabling Chosen on the public site and specifying which form elements to enhance, for example select[multiple] for multiple-selection lists (...)
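For illustration only (this is not part of the SPIP configuration described above): once the plugin ships the library to the public site, Chosen itself is a jQuery plugin that is typically initialised with a selector such as the select[multiple] example mentioned in the teaser. A minimal sketch, with the width option assumed:

// Enhance every multiple-selection list on the page with Chosen.
jQuery(function ($) {
  $('select[multiple]').chosen({ width: '100%' });
});
-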
Contribute to a better visual interface
13 April 2011
MediaSPIP is based on a system of themes and templates. Templates define the placement of information on the page, and can be adapted to a wide range of uses. Themes define the overall graphic appearance of the site.
Anyone can submit a new graphic theme or template and make it available to the MediaSPIP community. -
Support for all types of media
10 April 2011
Unlike many modern document-sharing applications and platforms, MediaSPIP aims to handle as many different document formats as possible, whether they are: images (png, gif, jpg, bmp, and others); audio (MP3, Ogg, Wav, and others); video (Avi, MP4, Ogv, mpg, mov, wmv, and others); or text, code, and other content (OpenOffice, Microsoft Office (spreadsheets, presentations), web (html, css), LaTeX, Google Earth) (...)
On other sites (10294)
-
Socket.io client in JS and Socket.io server in Go doesn't send the connected message and data
24 March 2023, by OmriHalifa
I am using ffmpeg and socket.io and I have some issues. I'm trying to send a connection request to a server written in Go from React, but I'm unable to connect to it. I tried adding the events in useEffect and it's still not working. What should I do? I'm attaching my code in JS and in Go:
main.go


package main

import (
    "log"

    "github.com/gin-gonic/gin"

    socketio "github.com/googollee/go-socket.io"
)

func main() {
    router := gin.New()

    server := socketio.NewServer(nil)

    server.OnConnect("/", func(s socketio.Conn) error {
        s.SetContext("")
        log.Println("connected:", s.ID())
        return nil
    })

    server.OnEvent("/", "notice", func(s socketio.Conn, msg string) {
        log.Println("notice:", msg)
        s.Emit("reply", "have "+msg)
    })

    server.OnEvent("/", "transcoded-video", func(s socketio.Conn, data string) {
        log.Println("transcoded-video:", data)
    })

    server.OnEvent("/", "bye", func(s socketio.Conn) string {
        last := s.Context().(string)
        s.Emit("bye", last)
        s.Close()
        return last
    })

    server.OnError("/", func(s socketio.Conn, e error) {
        log.Println("meet error:", e)
    })

    server.OnDisconnect("/", func(s socketio.Conn, reason string) {
        log.Println("closed", reason)
    })

    go func() {
        if err := server.Serve(); err != nil {
            log.Fatalf("socketio listen error: %s\n", err)
        }
    }()
    defer server.Close()

    if err := router.Run(":8000"); err != nil {
        log.Fatal("failed run app: ", err)
    }
}




App.js


import './App.css';
import { useEffect } from 'react';
import { createFFmpeg, fetchFile } from '@ffmpeg/ffmpeg';
import { io } from 'socket.io-client'; 

function App() {
  const socket = io("http://localhost:8000", function() {
    // Send a message to the server when the client is connected
    socket.emit('clientConnected', 'Client has connected to the server!');
  })

  const ffmpegWorker = createFFmpeg({
    log: true
  })

  // Initialize FFmpeg when the component is mounted
  async function initFFmpeg() {
    await ffmpegWorker.load();
  }

  async function transcode(webcamData) {
    const name = 'record.webm';
    await ffmpegWorker.FS('writeFile', name, await fetchFile(webcamData));
    await ffmpegWorker.run('-i', name, '-preset', 'ultrafast', '-threads', '4', 'output.mp4');
    const data = ffmpegWorker.FS('readFile', 'output.mp4');

    // Set the source of the output video element to the transcoded video data
    const video = document.getElementById('output-video');
    video.src = URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));

    // Remove the output.mp4 file from the FFmpeg virtual file system
    ffmpegWorker.FS('unlink', 'output.mp4');

    // Emit a "transcoded-video" event to the server with the transcoded video data
    socket.emit("transcoded-video", data.buffer)
  }
 
 

  let mediaRecorder;
  let chunks = [];

  // Request access to the user's camera and microphone and start recording
  function requestMedia() {
    const webcam = document.getElementById('webcam');
    navigator.mediaDevices.getUserMedia({ video: true, audio: true })
      .then(async (stream) => {
        webcam.srcObject = stream;
        await webcam.play();

        // Set up a MediaRecorder instance to record the video and audio
        mediaRecorder = new MediaRecorder(stream);

        // Add the recorded data to the chunks array
        mediaRecorder.ondataavailable = async (e) => {
          chunks.push(e.data);
        }

        // Transcode the recorded video data after the MediaRecorder stops
        mediaRecorder.onstop = async () => {
          await transcode(new Uint8Array(await (new Blob(chunks)).arrayBuffer()));

          // Clear the chunks array after transcoding
          chunks = [];

          // Start the MediaRecorder again after a 0 millisecond delay
          setTimeout(() => {
            mediaRecorder.start();

            // Stop the MediaRecorder again after 500 milliseconds
            setTimeout(() => {
              mediaRecorder.stop();
            }, 500);
          }, 0);
        }

        // Start the MediaRecorder
        mediaRecorder.start();

        // Stop the MediaRecorder after 700 milliseconds
        setTimeout(() => {
          mediaRecorder.stop();
        }, 700);
      })
  }
 
  useEffect(() => {
    // Set up event listeners for the socket connection
    socket.on('/', function() {
      // Log a message when the client is connected to the server
      console.log("Connected to server!");
    });

    socket.on('transcoded-video', function(data) {
      // Log the received data for debugging purposes
      console.log("Received transcoded video data:", data);
    });

    socket.on('notice', function(data) {
      // Emit a "notice" event back to the server to acknowledge the received data
      socket.emit("notice", "ping server!");
    });

    socket.on('bye', function(data) {
      // Log the received data and disconnect from the server
      console.log("Server sent:", data);
      socket.disconnect();
    });

    socket.on('disconnect', function() {
      // Log a message when the client is disconnected from the server
      console.log("Disconnected from server!");
    });
  }, [])

  return (
    <div className="App">
      <div>
        <video muted={true}></video>
        <video autoPlay></video>
      </div>
      <button>start streaming</button>
    </div>
  );
}

export default App;



What can I do to fix it? Thank you!!
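For comparison, a common client-side pattern is to create the socket.io connection inside useEffect (so it is set up once and torn down on unmount) and to listen for the library's built-in connect event rather than a custom '/' event. The following is only a sketch built around the URL and event names from the question, not a verified fix; note as well that the Go server above never mounts the socket.io handler on the gin router (for example with gin.WrapH on the /socket.io/ route), which may be why the handshake never succeeds.

import { useEffect } from 'react';
import { io } from 'socket.io-client';

function SocketDemo() {
  useEffect(() => {
    // Create the client once, when the component mounts.
    const socket = io('http://localhost:8000');

    // 'connect' is the built-in event fired after a successful handshake.
    socket.on('connect', () => {
      console.log('Connected to server with id', socket.id);
      socket.emit('notice', 'hello from the browser');
    });

    // The Go server in the question answers "notice" with a "reply" event.
    socket.on('reply', (msg) => {
      console.log('Server replied:', msg);
    });

    // Tear the connection down when the component unmounts.
    return () => {
      socket.disconnect();
    };
  }, []);

  return null; // rendering omitted in this sketch
}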


-
Is there a way to work with the fluent-ffmpeg library for audio encoding (webm to wav/mp3) in Angular 2+?
24 April 2019, by binarysynthesis
I'm using the browser media APIs to record audio from the microphone for speech-to-text transcription. The recording gives me a blob/file in webm format. I want to convert that blob/file to wav or mp3 and send it to AWS S3 storage, from where I intend to use the AWS Transcribe (speech-to-text) service to pick up the file and produce a transcript of the speech. AWS Transcribe doesn't support the webm format, which is why I need to encode the audio on the client side to either wav or mp3.
I'm trying to use the fluent-ffmpeg (third-party) npm library [https://www.npmjs.com/package/fluent-ffmpeg] to accomplish that, but I keep getting the following errors from the TypeScript compiler when building with ng serve. I have already tried the RecorderJS and WebAudioRecorderJS npm libraries and I get the same 'Module not found' error. -
ERROR in ./node_modules/fluent-ffmpeg/index.js
Module not found: Error: Can't resolve './lib-cov/fluent-ffmpeg' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg'
ERROR in ./node_modules/fluent-ffmpeg/lib/ffprobe.js
Module not found: Error: Can't resolve 'child_process' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/processor.js
Module not found: Error: Can't resolve 'child_process' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/utils.js
Module not found: Error: Can't resolve 'child_process' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/recipes.js
Module not found: Error: Can't resolve 'fs' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/capabilities.js
Module not found: Error: Can't resolve 'fs' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/processor.js
Module not found: Error: Can't resolve 'fs' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/isexe/index.js
Module not found: Error: Can't resolve 'fs' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\isexe'
ERROR in ./node_modules/isexe/windows.js
Module not found: Error: Can't resolve 'fs' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\isexe'
ERROR in ./node_modules/isexe/mode.js
Module not found: Error: Can't resolve 'fs' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\isexe'
ERROR in ./node_modules/fluent-ffmpeg/lib/utils.js
Module not found: Error: Can't resolve 'os' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/recipes.js
Module not found: Error: Can't resolve 'path' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/fluent-ffmpeg.js
Module not found: Error: Can't resolve 'path' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/capabilities.js
Module not found: Error: Can't resolve 'path' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/processor.js
Module not found: Error: Can't resolve 'path' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
ERROR in ./node_modules/fluent-ffmpeg/lib/options/misc.js
Module not found: Error: Can't resolve 'path' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib\options'
ERROR in ./node_modules/which/which.js
Module not found: Error: Can't resolve 'path' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\which'
ERROR in ./node_modules/fluent-ffmpeg/lib/recipes.js
Module not found: Error: Can't resolve 'stream' in 'C:\Users\banshuman\Desktop\AWS-Transcribe-Angular\aws-transcribe-angular\node_modules\fluent-ffmpeg\lib'
i 「wdm」: Failed to compile.
I am using Angular 7.2.0 and TypeScript 3.2.4.
I have also installed the type definitions for fluent-ffmpeg [https://www.npmjs.com/package/@types/fluent-ffmpeg] within the node_modules to specify the typings for TypeScript.
Below is my Angular component file where I have implemented the audio recording functionality in the browser:

/// <reference types="@types/dom-mediacapture-record" />
import { Component } from '@angular/core';
import * as aws from 'aws-sdk';
import * as TranscribeService from 'aws-sdk/clients/transcribeservice';
import * as Ffmpeg from 'fluent-ffmpeg';

@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.css']
})
export class AppComponent {
  speechToText() {
    console.log(Ffmpeg);
    // Begin streaming audio
    navigator.mediaDevices.getUserMedia({ audio: true })
      .then(stream => {
        const mediaRecorder = new MediaRecorder(stream);
        // Start recording audio
        mediaRecorder.start();
        const audioChunks = [];
        // When recording starts
        mediaRecorder.addEventListener("dataavailable", event => {
          audioChunks.push((<any>event).data);
        });
        // When recording stops
        mediaRecorder.addEventListener("stop", () => {
          const audioBlob = new Blob(audioChunks, { type: 'audio/webm;codecs=opus' });
          const audioFile = new File([audioBlob], 'outputAudioFile');
          const audioUrl = URL.createObjectURL(audioBlob);
I'm not posting the entire component code, as the rest is part of the AWS SDK and is irrelevant to the problem statement. I need to convert the audioBlob or the audioFile, which are currently in webm format, to wav or mp3 for uploading to the AWS services. How can I achieve that in Angular using the ffmpeg library? I'm open to other solutions as well, not just ffmpeg, to get the job done on the client side.
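For what it's worth, fluent-ffmpeg is a Node.js wrapper that spawns a native ffmpeg binary through child_process, which is why the browser build cannot resolve child_process, fs, os and path; it is not designed to run client-side. A purely in-browser alternative is the WebAssembly @ffmpeg/ffmpeg package (the same one used in the first question above). A minimal sketch, assuming its 0.x API (createFFmpeg / FS / run) and hypothetical file names:

import { createFFmpeg, fetchFile } from '@ffmpeg/ffmpeg';

const ffmpeg = createFFmpeg({ log: true });

// Convert a recorded webm blob to a wav blob entirely in the browser.
async function webmToWav(webmBlob) {
  if (!ffmpeg.isLoaded()) {
    await ffmpeg.load(); // downloads the wasm core once
  }
  ffmpeg.FS('writeFile', 'input.webm', await fetchFile(webmBlob));
  await ffmpeg.run('-i', 'input.webm', 'output.wav');
  const data = ffmpeg.FS('readFile', 'output.wav');
  ffmpeg.FS('unlink', 'input.webm');
  ffmpeg.FS('unlink', 'output.wav');
  return new Blob([data.buffer], { type: 'audio/wav' });
}

The resulting wav blob could then be uploaded to S3 and handed to Transcribe as before.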
-
Using ffmpeg to merge video segments created by the MediaRecorder API
10 April 2023, by Dario Cimmino
I am recording live video from a webcam using the MediaRecorder API in chunks of 3 seconds:


startButton.addEventListener('click', () => {
  navigator.mediaDevices.getUserMedia({
    video: {
      width: 1280,
      height: 720,
      frameRate: { ideal: 30, max: 30 }
    }
  })
    .then(stream => {
      video.srcObject = stream;
      mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/webm' });
      mediaRecorder.ondataavailable = async (event) => {
        const blob = new Blob([event.data], { type: 'video/mp4' });
        const formData = new FormData();
        formData.append('segment', blob, `segment${segmentNumber}.mp4`);

        // When a new video segment is ready
        fetch('http://localhost:3000/upload', {
          method: 'POST',
          body: formData
        })
          .then((response) => response.text())
          .then((result) => {
            console.log('Upload result:', result);
          })
          .catch((error) => {
            console.error('Error uploading video segment:', error);
          });
        // Upload data to mysql
        fetch('upload.php', {
          method: 'POST',
          body: formData
        })
          .then(response => response.text())
          .then(result => {
            console.log('Upload result to MYSQL:', result);
          })
          .catch(error => {
            console.error('Error uploading video segment to MYSQL:', error);
          });
        segmentNumber++;
      };

      mediaRecorder.start(3000);
    })
    .catch(error => {
      console.error('Error accessing camera:', error);
    });
});


I am left with only the first segment playable, as is expected.


However, when the recording stops, I'd like to merge all those recorded segments using ffmpeg (or any other tool) with the help of my Node.js server.


I am having difficulty understanding the parsing of mp4 files.


If I try the command:


ffmpeg -i segment1.mp4 -i segment2.mp4 -i segment3.mp4 out.mp4



I get the following error:


ffmpeg version N-110223-gb18a9c2971-20230410 Copyright (c) 2000-2023 the FFmpeg developers
 built with gcc 12.2.0 (crosstool-NG 1.25.0.152_89671bf)
 configuration: --prefix=/ffbuild/prefix --pkg-config-flags=--static --pkg-config=pkg-config --cross-prefix=x86_64-w64-mingw32- --arch=x86_64 --target-os=mingw32 --enable-gpl --enable-version3 --disable-debug --disable-w32threads --enable-pthreads --enable-iconv --enable-libxml2 --enable-zlib --enable-libfreetype --enable-libfribidi --enable-gmp --enable-lzma --enable-fontconfig --enable-libvorbis --enable-opencl --disable-libpulse --enable-libvmaf --disable-libxcb --disable-xlib --enable-amf --enable-libaom --enable-libaribb24 --enable-avisynth --enable-chromaprint --enable-libdav1d --enable-libdavs2 --disable-libfdk-aac --enable-ffnvcodec --enable-cuda-llvm --enable-frei0r --enable-libgme --enable-libkvazaar --enable-libass --enable-libbluray --enable-libjxl --enable-libmp3lame --enable-libopus --enable-librist --enable-libssh --enable-libtheora --enable-libvpx --enable-libwebp --enable-lv2 --disable-libmfx --enable-libvpl --enable-openal --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 --enable-libopenjpeg --enable-libopenmpt --enable-librav1e --enable-librubberband --enable-schannel --enable-sdl2 --enable-libsoxr --enable-libsrt --enable-libsvtav1 --enable-libtwolame --enable-libuavs3d --disable-libdrm --disable-vaapi --enable-libvidstab --enable-vulkan --enable-libshaderc --enable-libplacebo --enable-libx264 --enable-libx265 --enable-libxavs2 --enable-libxvid --enable-libzimg --enable-libzvbi --extra-cflags=-DLIBTWOLAME_STATIC --extra-cxxflags= --extra-ldflags=-pthread --extra-ldexeflags= --extra-libs=-lgomp --extra-version=20230410
 libavutil 58. 6.100 / 58. 6.100
 libavcodec 60. 9.100 / 60. 9.100
 libavformat 60. 4.101 / 60. 4.101
 libavdevice 60. 2.100 / 60. 2.100
 libavfilter 9. 5.100 / 9. 5.100
 libswscale 7. 2.100 / 7. 2.100
 libswresample 4. 11.100 / 4. 11.100
 libpostproc 57. 2.100 / 57. 2.100
Input #0, matroska,webm, from 'segment1.mp4':
 Metadata:
 encoder : Chrome
 Duration: N/A, start: 0.000000, bitrate: N/A
 Stream #0:0(eng): Video: h264 (Constrained Baseline), yuv420p(progressive), 1280x720 [SAR 1:1 DAR 16:9], 30.30 fps, 30 tbr, 1k tbn (default)
[mov,mp4,m4a,3gp,3g2,mj2 @ 000001d93cf25fc0] Format mov,mp4,m4a,3gp,3g2,mj2 detected only with low score of 1, misdetection possible!
[mov,mp4,m4a,3gp,3g2,mj2 @ 000001d93cf25fc0] moov atom not found
segment2.mp4: Invalid data found when processing input



Any help or input is appreciated. Thanks!
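For reference, the "moov atom not found" error is expected here: when MediaRecorder is started with a timeslice, only the first chunk carries the container header, so segment2.mp4 and later are not standalone files (and, as ffmpeg's probe of segment1 shows, the data is actually matroska/webm despite the .mp4 name). A common approach on the Node.js side is therefore to join the uploaded chunks back into one stream first and only then hand the result to ffmpeg. A minimal sketch, with file and path names assumed for illustration, not a verified fix:

// merge.js -- sketch: join the uploaded chunks in order, then remux with ffmpeg.
const { execFile } = require('child_process');
const fs = require('fs');

const segments = ['segment1.mp4', 'segment2.mp4', 'segment3.mp4'];

// MediaRecorder chunks after the first are continuations of one stream,
// so byte-level concatenation restores a single playable recording.
fs.writeFileSync('merged.webm', Buffer.concat(segments.map((f) => fs.readFileSync(f))));

// Remux (or re-encode) the joined stream into a proper mp4 container.
execFile('ffmpeg', ['-y', '-i', 'merged.webm', '-c', 'copy', 'out.mp4'], (err, stdout, stderr) => {
  if (err) {
    console.error('ffmpeg failed:', stderr);
  } else {
    console.log('Wrote out.mp4');
  }
});

If stream copying is not possible for the target container, '-c', 'copy' can be replaced with explicit codec options to re-encode instead.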