
Other articles (57)
-
Videos
21 April 2011 — Like "audio" documents, MediaSPIP displays videos wherever possible using the HTML5 <video> tag.
One drawback of this tag is that some browsers (Internet Explorer, to name one) do not handle it correctly, and each browser natively supports only certain video formats.
Its main advantage is that video playback is handled natively by the browser, removing the need for Flash and (...)
-
Websites made with MediaSPIP
2 May 2011 — This page lists some websites based on MediaSPIP.
-
Farm deployment option
12 April 2011 — MediaSPIP can be installed as a farm, with a single "core" hosted on a dedicated server and used by a multitude of different sites.
This makes it possible, for example: to share set-up costs between several projects / individuals; to deploy a multitude of unique sites quickly; to avoid having to dump every creation into a digital catch-all, as happens on the big general-public platforms scattered across the (...)
On other sites (4947)
-
How to decrease the latency of RTP streaming with ffmpeg on Android?
24 August 2018, by Douglas Lima Dantas
I'm creating an app that does RTP streaming. It uses a ParcelFileDescriptor pipe pair, where a MediaRecorder writes into the pipe while ffmpeg reads the audio from the pipe and sends it over RTP.
On my desktop, on the same Wi-Fi router, I receive the RTP stream using ffplay, and it has a delay of between 5 and 10 seconds.
I tried to capture audio with ffmpeg directly on Android, but it isn't possible. I tried using ffplay -fflags nobuffer, using MIC as the source, changing the encoder, etc.
I need the lowest delay possible. How can I do it?
The code:
public class MainActivity extends AppCompatActivity implements View.OnClickListener {
MediaRecorder mediaRecorder;
AudioRecord record;
MediaPlayer mediaPlayer;
ParcelFileDescriptor[] pipePair;
ParcelFileDescriptor pipeRead;
ParcelFileDescriptor pipeWrite;
Process ffmpegProc;
// Requesting permission to RECORD_AUDIO
private boolean permissionsAccepted = false;
private String [] permissions = {
Manifest.permission.RECORD_AUDIO,
Manifest.permission.INTERNET,
Manifest.permission.ACCESS_NETWORK_STATE
};
private static final int REQUEST_PERMISSIONS = 200;
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode){
case REQUEST_PERMISSIONS:
permissionsAccepted = grantResults[0] == PackageManager.PERMISSION_GRANTED;
break;
}
if (!permissionsAccepted ) finish();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ActivityCompat.requestPermissions(this, permissions, REQUEST_PERMISSIONS);
TextView hello = (TextView) findViewById(R.id.hello);
hello.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(MainActivity.this, "Clicado",Toast.LENGTH_SHORT)
.show();
copiarFFMpeg();
}
});
}
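// Launches the ffmpeg binary, logs its output on one thread, and on another
// thread pumps bytes from pipeRead into ffmpeg's stdin.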
private void executarFFMpeg(final String[] cmd, ParcelFileDescriptor read) {
try {
ffmpegProc = Runtime.getRuntime().exec(cmd);
} catch (IOException e) {
e.printStackTrace();
}
(new Thread(new Runnable() {
@Override
public void run() {
try {
InputStream inStream = ffmpegProc.getInputStream();
InputStreamReader sReader = new InputStreamReader(inStream);
BufferedReader bufferedReader = new BufferedReader(sReader);
String line;
while ((line = bufferedReader.readLine()) != null) {
Log.d("FFMPEG",line);
}
} catch (IOException e) {
e.printStackTrace();
}
}
})).start();
(new Thread(new Runnable() {
@Override
public void run() {
byte[] buffer = new byte[8192];
int read = 0;
OutputStream ffmpegInput = ffmpegProc.getOutputStream();
FileInputStream reader = new FileInputStream(pipeRead.getFileDescriptor());
try {
while (true) {
if (reader.available()>0) {
read = reader.read(buffer);
ffmpegInput.write(buffer, 0, read);
ffmpegInput.flush();
} else {
Thread.sleep(10);
}
}
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
onDestroy();
}
}
})).start();
Log.d("FFMPEG","Executado");
}
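// Returns whether ffmpeg is still running; Process.isAlive() only exists
// from API 26 (O), so older versions probe exitValue() instead.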
private boolean estaExecutando(Process ffmpegProc) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
return ffmpegProc.isAlive();
} else {
try {
ffmpegProc.exitValue();
return false;
} catch (Exception e) {
return true;
}
}
}
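// Configures a MediaRecorder that encodes the microphone as AAC (ADTS)
// and writes it straight into the pipe's write end.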
private void criarMediaRecorder() {
if (pipeWrite != null) {
try {
//ffplay.exe -protocol_whitelist rtp,file,udp ..\file.sdp
mediaRecorder = new MediaRecorder();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.AAC_ADTS);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
//mediaRecorder.setAudioChannels(2);
mediaRecorder.setOutputFile(pipeWrite.getFileDescriptor());
mediaRecorder.prepare();
mediaRecorder.start();
Log.d("MREC","MediaRecorder criado");
} catch (Exception e) {
e.printStackTrace();
}
}
}
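// Creates the ParcelFileDescriptor pipe pair: MediaRecorder writes to
// pipeWrite, the pump thread reads from pipeRead.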
private void criarPipe() {
try {
pipePair =ParcelFileDescriptor.createPipe();
} catch (IOException e) {
Log.e("PIPE","Deu zica na criação do pipe");
e.printStackTrace();
finish();
}
pipeRead = new ParcelFileDescriptor(pipePair[0]);
pipeWrite = new ParcelFileDescriptor(pipePair[1]);
}
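// Unpacks the bundled ffmpeg binary via the library's loadBinary() callback,
// then builds the RTP command and starts everything.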
private void copiarFFMpeg() {
FFmpeg ffmpeg = FFmpeg.getInstance(this);
try {
ffmpeg.loadBinary(new LoadBinaryResponseHandler() {
@Override
public void onStart() {
Log.d("FFMPEG","Iniciar cópia");
}
@Override
public void onFailure() {
Log.e("FFMPEG","Cópia falhou");
}
@Override
public void onSuccess() {
Log.d("FFMPEG","Cópiado com sucesso");
criarPipe();
//mediaRecorder.start();
File ffmpegBin = new File(getFilesDir().getAbsolutePath()+"/ffmpeg");
String[] cmd = new String[] {
ffmpegBin.getAbsolutePath(),
"-re",
"-thread_queue_size","4",
"-i","pipe:",
//"-c:a","libx264",
//"-preset","veryfast",
//"-tune","zerolatency",
"-f",
"rtp",
"rtp://192.168.0.33:1234"
};
executarFFMpeg(cmd, pipeRead);
criarMediaRecorder();
}
@Override
public void onFinish() {
}
});
} catch (FFmpegNotSupportedException e) {
// Handle if FFmpeg is not supported by device
Log.e("FFMPEG", "Não sou suportado :/");
}
}
@Override
protected void onDestroy() {
super.onDestroy();
}
@Override
public void onClick(View view) {
}
}

The command:
ffplay rtp://192.168.0.33:1234
ffplay -fflags nobuffer rtp://192.168.0.33:1234
-
How to make a video player using FFmpeg on the Android NDK
24 July 2012, by LMDS
I'm trying to make a video player using ffmpeg, following this tutorial: http://dranger.com/ffmpeg/tutorial08.html. As I understand it, the tutorial decodes a video into YUV images. I'm trying to write the file that bridges the .c and the .java sides. I have the C code from tutorial08 (http://dranger.com/ffmpeg/tutorial08.c), and then I made:
public class RtspReceiver extends Activity {
public SurfaceView sfv;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.principal);
sfv=(SurfaceView) findViewById(R.id.im);
}
/* load our native library */
static {
System.loadLibrary("Interface");
}
private static native void Receive(SurfaceView sf);
}

In the C code, I'm trying to understand how I can use this:
/* JNI entry points must carry the Java_ prefix, or the runtime cannot find them. */
JNIEXPORT void JNICALL Java_isec_projecto_rtspreceiver_RtspReceiver_Receive(JNIEnv * env, jobject obj, jobject surface)
{
    // what do I have to put in here?
}

How can I pass the SurfaceView that I have in the Java code into the C code? And another thing: in tutorial08.c, how can I extract the video frames and hand them over to the Java side? Am I thinking correctly?
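For what it's worth, the usual bridge on Android is the NDK's ANativeWindow API: the Java side declares the native method as taking an android.view.Surface (not the SurfaceView) and passes sfv.getHolder().getSurface(), while the C side wraps that jobject in an ANativeWindow it can lock and fill with decoded pixels. A minimal sketch, assuming the decoder already produced an RGBA frame; draw_frame and its arguments are illustrative names, not part of the question's code:

#include <jni.h>
#include <stdint.h>
#include <string.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

/* Copy one RGBA frame into the window. buffer.stride is measured in
 * pixels, so each destination row is stride * 4 bytes wide. */
static void draw_frame(ANativeWindow *window, const uint8_t *rgba, int w, int h)
{
    ANativeWindow_Buffer buffer;
    int y;

    ANativeWindow_setBuffersGeometry(window, w, h, WINDOW_FORMAT_RGBA_8888);
    if (ANativeWindow_lock(window, &buffer, NULL) != 0)
        return;

    for (y = 0; y < h; y++)
        memcpy((uint8_t *) buffer.bits + (size_t) y * buffer.stride * 4,
               rgba + (size_t) y * w * 4,
               (size_t) w * 4);

    ANativeWindow_unlockAndPost(window);
}

JNIEXPORT void JNICALL
Java_isec_projecto_rtspreceiver_RtspReceiver_Receive(JNIEnv *env, jobject obj, jobject surface)
{
    /* 'surface' is the android.view.Surface passed down from Java. */
    ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
    if (window == NULL)
        return;

    /* In a real player this sits in the decode loop: after sws_scale()
     * converts each AVFrame to RGBA, hand the pixels to draw_frame():
     * draw_frame(window, frame_rgba->data[0], width, height); */

    ANativeWindow_release(window);
}

Link with -landroid for the ANativeWindow functions; they are available from android-9 onwards.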
-
Getting no result from GetStringUTFChars (JNI)
9 April 2013, by talhamalik22
I am using the Android NDK for my Android application. I am stuck on the starting lines and it does not compile any further. Following is my code. It does not compile after "str = (*env)->GetStringUTFChars(env, filename, NULL);". Please check my Java and C code.
The Java code:
public class MyffmpegActivity extends Activity {
private static native int logFileInfo(String filename);
static
{
Log.i("HEHA", "HOHA");
System.loadLibrary("mylib");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_myffmpeg);
String path=Environment.getExternalStorageDirectory().getPath();
path=path+"/test.mp4";
Log.i("Name Returned is ", ":"+path);
int x=logFileInfo(path);
}
}

The C code:
#include <jni.h>
#include <libavformat/avformat.h>
/* LOGI/LOGE are the usual __android_log_print() wrappers. */

jint Java_com_example_myffmpegtest_MyffmpegActivity_logFileInfo(JNIEnv * env, jobject thiz, jstring filename)
{
    /* Declarations first: mixing statements and declarations needs C99,
       which is one thing that can stop this file from building. */
    AVFormatContext *pFormatCtx = NULL;
    const char *str; /* GetStringUTFChars() returns const char*, not const jbyte* */

    av_register_all();

    str = (*env)->GetStringUTFChars(env, filename, NULL);
    if (str == NULL)
        return 1; /* OutOfMemoryError was already thrown */

    if (av_open_input_file(&pFormatCtx, str, NULL, 0, NULL) != 0)
    {
        LOGE("Can't open file '%s'\n", str);
        (*env)->ReleaseStringUTFChars(env, filename, str);
        return 1;
    }
    else
    {
        LOGI("File was opened\n");
        LOGI("File '%s', Codec %s",
             pFormatCtx->filename,
             pFormatCtx->iformat->name);
    }

    (*env)->ReleaseStringUTFChars(env, filename, str);
    return 0;
}
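A side note: av_open_input_file() belongs to FFmpeg's old demuxing API and was later removed in favour of avformat_open_input(). A minimal sketch of the same file-info check against the newer API, assuming a reasonably recent FFmpeg build (very old ones still need av_register_all() first; log_file_info is an illustrative name):

#include <libavformat/avformat.h>

/* Open a media file, read enough of it to fill in stream parameters,
 * and dump the container/codec details to the FFmpeg log. */
static int log_file_info(const char *path)
{
    AVFormatContext *fmt_ctx = NULL;

    if (avformat_open_input(&fmt_ctx, path, NULL, NULL) != 0)
        return 1;

    if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
        avformat_close_input(&fmt_ctx);
        return 1;
    }

    av_dump_format(fmt_ctx, 0, path, 0); /* last 0 = treat as input */

    avformat_close_input(&fmt_ctx);
    return 0;
}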