
Media (1)
-
The Great Big Beautiful Tomorrow
28 October 2011, by
Updated: October 2011
Language: English
Type: Text
Other articles (44)
-
Accepted formats
28 January 2010, by
The following commands provide information about the formats and codecs supported by the local ffmpeg installation:
ffmpeg -codecs
ffmpeg -formats
Accepted input video formats
This list is not exhaustive; it highlights the main formats in use: h264: H.264 / AVC / MPEG-4 AVC / MPEG-4 Part 10; m4v: raw MPEG-4 video format; flv: Flash Video (FLV) / Sorenson Spark / Sorenson H.263; Theora; wmv: (...)
Possible output video formats
To begin with, (...) -
MediaSPIP Player: the controls
26 May 2010, by
Mouse controls for the player
In addition to the actions triggered by clicking the visible buttons of the player interface, other actions can be performed with the mouse: Click: clicking on the video or on the audio logo toggles between play and pause depending on the current state; Wheel (scrolling): when the mouse hovers over the area used by the media, the mouse wheel no longer scrolls the page as usual, but decreases or (...) -
Contribute to translation
13 April 2011
You can help us improve the language used in the software interface to make MediaSPIP more accessible and user-friendly. You can also translate the interface into any language, which allows it to spread to new linguistic communities.
To do this, we use the translation interface of SPIP, where all the language modules of MediaSPIP are available. Just subscribe to the mailing list and request further information on translation.
MediaSPIP is currently available in French and English (...)
On other sites (7248)
-
How to send audio or video by packet through UDP and sync the image and audio
24 January 2019, by Wei Wen
How to send part of the video and audio of an mp4 file as packets over UDP from the server? The client will play back the part of the packets it receives.

import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.math.BigInteger;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.ServerSocket;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import javax.imageio.ImageIO;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.swing.JTextArea;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.Java2DFrameConverter;
import Enum.EType.ClientState;
import View.SingleDisplayWindow;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;

public class SCon {
    // Types such as RSA, Database, SHandler and StreamFile, and the fields
    // videoFile, ciphers[] and sessionKeys[], are defined elsewhere in the
    // poster's project.
    private final static int PORT = 8888;
    private final JTextArea TEXT_AREA;
    private volatile SingleDisplayWindow DISPLAY;

    private final String BD_USER_NAME, DB_PASSWORD;
    private Database database;

    private boolean isRunning;
    private RSA serverRSA, clientRSA;
    private int keyIndex, typeID = 0;
    private String mediatype = "";
    private ArrayList<SHandler> sHandlers;

    private FileStreamingThread fileStreamingThread;
    private VideoStreamingThread videoStreamingThread;
    private BroadcastThread broadcastThread;
    private ConnectThread connectThread;

    private volatile static byte[] currentVideoFrame = new byte[0], currentAudioFrame = new byte[0]; // current image / music

    public void run() {
        startServer();
        isRunning = true;
        fileStreamingThread = new FileStreamingThread(videoFile);
        videoStreamingThread = new VideoStreamingThread(videoFile);
        //CountDownLatch latch = new CountDownLatch(1);
        fileStreamingThread.start();
        videoStreamingThread.start();
        //latch.countDown();
        broadcastThread = new BroadcastThread();
        broadcastThread.start();
        connectThread = new ConnectThread();
        connectThread.start();
    }

    public void stop() {
        isRunning = false;
        try {
            new Socket("localhost", PORT); // unblock accept() in ConnectThread
        } catch (IOException e) {
            e.printStackTrace();
        }
        while (fileStreamingThread.isAlive()) { }
        while (broadcastThread.isAlive()) { }
        while (connectThread.isAlive()) { }
        for (SHandler sHandler : sHandlers) {
            sHandler.connectionClose();
        }
        sHandlers.clear();
        DISPLAY.dispose();
        TEXT_AREA.append("\nServer stop\n");
    }

    private class VideoStreamingThread extends Thread {
        private FFmpegFrameGrabber grabber;      // Used to extract frames from the video file
        private Java2DFrameConverter converter; // Used to convert frames to images
        private int curIndex;                    // Current key index

        public VideoStreamingThread(String video_file) {
            videoFile = video_file;
            grabber = new FFmpegFrameGrabber(videoFile);
            converter = new Java2DFrameConverter();
            try {
                grabber.restart();
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
            }
            curIndex = keyIndex;
        }

        public void run() {
            try {
                while (isRunning) {
                    curIndex = keyIndex;
                    Frame frame = null;
                    System.out.println("v1");
                    if ((frame = grabber.grab()) != null) { // Grab next frame from video file
                        if (frame.image != null) { // image frame
                            BufferedImage bi = converter.convert(frame); // convert frame to image

                            // Convert BufferedImage to byte[]
                            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                            ImageIO.write(bi, "jpg", baos);

                            // Encrypt data and store as the current image of byte[] type
                            currentVideoFrame = ciphers[curIndex].doFinal(baos.toByteArray());

                            DISPLAY.setSize(new Dimension(bi.getWidth(), bi.getHeight()));
                            DISPLAY.updateImage(bi); // Display image
                            // Thread.sleep((long) (999 / grabber.getFrameRate()));

                            typeID = 1;
                            mediatype = grabber.getFormat();
                        }
                    } else {
                        grabber.restart(); // Restart when the end of the video is reached
                    }
                }
                grabber.close();
            } catch (IOException e) {
                e.printStackTrace();
            } catch (IllegalBlockSizeException e) {
                e.printStackTrace();
            } catch (BadPaddingException e) {
                e.printStackTrace();
            }
            //catch (InterruptedException e) { e.printStackTrace(); }
        }

        public synchronized int getCurKeyIndex() {
            return curIndex;
        }

        public synchronized void getVideoFile(String video_file) {
            videoFile = video_file;
            grabber = new FFmpegFrameGrabber(video_file);
            converter = new Java2DFrameConverter();
            try {
                grabber.release();
                grabber.restart();
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
            }
        }
    }

    private class FileStreamingThread extends Thread {
        private FFmpegFrameGrabber grabber; // Used to extract samples from the video file
        private int curIndex;               // Current key index

        public FileStreamingThread(String video_file) {
            videoFile = video_file;
            grabber = new FFmpegFrameGrabber(videoFile);
            try {
                grabber.restart();
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
            }
            curIndex = keyIndex;
        }

        public void run() {
            try {
                while (isRunning) {
                    curIndex = keyIndex;
                    Frame frame = null;
                    System.out.println("a2");
                    if ((frame = grabber.grabSamples()) != null) { // Grab next frame from video file
                        if (frame.samples != null) { // audio frame
                            // Encrypt audio
                            ShortBuffer channelSamplesShortBuffer = (ShortBuffer) frame.samples[0];
                            channelSamplesShortBuffer.rewind();
                            ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesShortBuffer.capacity() * 2);
                            for (int i = 0; i < channelSamplesShortBuffer.capacity(); i++) {
                                short val = channelSamplesShortBuffer.get(i);
                                outBuffer.putShort(val);
                            }
                            AudioFileFormat audiofileFormat = new AudioFileFormat(null, null, typeID);
                            AudioFormat audioFormat = new AudioFormat(44100, 16, 2, true, true);
                            //System.out.println(grabber.getSampleFormat());

                            // Encrypt data and store as the current audio of byte[] type
                            currentAudioFrame = ciphers[curIndex].doFinal(outBuffer.array());

                            DISPLAY.updateAudio(outBuffer.array(), grabber.getFormat()); // Play audio
                            // Thread.sleep((long) (1000 / grabber.getSampleRate()));
                            // AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), grabber.getAudioBitrate(), grabber.getAudioChannels(), true, true);
                            // DISPLAY.updateAudio(outBuffer.array(), audioFormat); // Play audio
                            outBuffer.clear();

                            typeID = 2;
                            mediatype = grabber.getFormat();
                        }
                    } else {
                        grabber.restart(); // Restart when the end of the video is reached
                    }
                }
                grabber.close();
            } catch (IOException e) {
                e.printStackTrace();
            } catch (IllegalBlockSizeException e) {
                e.printStackTrace();
            } catch (BadPaddingException e) {
                e.printStackTrace();
            }
        }

        public synchronized int getCurKeyIndex() {
            return curIndex;
        }

        public synchronized void getVideoFile(String video_file) {
            videoFile = video_file;
            grabber = new FFmpegFrameGrabber(video_file);
            try {
                grabber.release();
                grabber.restart();
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
            }
        }
    }

    public void setVideoFile(String videoFile) {
        this.videoFile = videoFile;
    }

    public void setThreadFile(String video_file) {
        fileStreamingThread.getVideoFile(video_file);
        videoStreamingThread.getVideoFile(video_file);
    }

    private class BroadcastThread extends Thread {
        public void run() {
            while (isRunning) {
                Thread.yield();
                for (int i = 0; i < sHandlers.size(); i++) {
                    if (sHandlers.get(i).getClientState() == ClientState.R) {
                        sHandlers.get(i).setClientState(ClientState.W);
                        BroadcastWorker workerThread = new BroadcastWorker(sHandlers.get(i));
                        workerThread.start();
                    }
                }
            }
        }
    }

    private class BroadcastWorker extends Thread {
        SHandler sHandler = null;

        public BroadcastWorker(SHandler sHandler) {
            this.sHandler = sHandler;
        }

        public void run() {
            try {
                DatagramSocket out = new DatagramSocket(); // used to send UDP packets
                while (sHandler.getClientState() == ClientState.W) {
                    Thread.yield();
                    StreamFile s = new StreamFile(typeID, currentVideoFrame, currentAudioFrame, mediatype);
                    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                    ObjectOutputStream os = new ObjectOutputStream(outputStream);
                    os.writeObject(s);
                    byte[] data = outputStream.toByteArray();
                    // Create and send UDP packet
                    DatagramPacket videoPacket = new DatagramPacket(data, data.length,
                            sHandler.getClientSocket().getInetAddress(),
                            Integer.parseInt(sHandler.getClientPort()));
                    out.send(videoPacket);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    private class ConnectThread extends Thread {
        public void run() {
            TEXT_AREA.append("\nWaiting for clients' connection.....\n");
            try {
                ServerSocket serverSocket = new ServerSocket(PORT);
                Socket clientSocket = null;
                while (isRunning) {
                    clientSocket = serverSocket.accept();
                    if (isRunning) {
                        SHandler sHandler = new SHandler(clientSocket, serverRSA, clientRSA, sessionKeys[keyIndex],
                                TEXT_AREA);
                        sHandler.start();
                        sHandlers.add(sHandler);
                    }
                }
                serverSocket.close();
                if (clientSocket != null) {
                    clientSocket.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

My audio and image are not in sync.
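A likely cause of the drift: VideoStreamingThread and FileStreamingThread decode the same file in two free-running loops that share no clock, and the commented-out sleeps ignore the media timestamps. Below is a minimal sketch (not the poster's code) of timestamp-paced grabbing with JavaCV, using a single grabber so audio and video frames arrive interleaved in decode order; the FrameSink interface and its send() method are hypothetical stand-ins for the encrypt-and-send UDP path above:

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

public class PacedStreamer {
    // Hypothetical sink standing in for the serialize/encrypt/send UDP path.
    public interface FrameSink {
        void send(Frame frame);
    }

    public static void stream(String videoFile, FrameSink sink) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(videoFile);
        grabber.start();
        long startMicros = System.nanoTime() / 1000L; // wall clock in microseconds
        Frame frame;
        // grab() returns audio and video frames interleaved in decode order,
        // so one loop paces both streams against the same wall clock.
        while ((frame = grabber.grab()) != null) {
            // Frame.timestamp is filled by FFmpegFrameGrabber in microseconds.
            long waitMicros = startMicros + frame.timestamp - System.nanoTime() / 1000L;
            if (waitMicros > 0) {
                Thread.sleep(waitMicros / 1000L, (int) ((waitMicros % 1000L) * 1000L));
            }
            sink.send(frame);
        }
        grabber.stop();
        grabber.release();
    }
}

Pacing on the server side only helps if the client also buffers a little; a more robust variant sends frame.timestamp inside each packet so the client can schedule playback of audio and image itself.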
-
avformat_write_header() returns -22? [duplicate]
9 January 2019, by TTGroup
This question already has an answer here:
I'm using the following code to re-stream an existing RTSP stream, but it fails at avformat_write_header() with return value -22. I have searched many times but have not found a solution.
int ReStream()
{
AVOutputFormat *ofmt = NULL;
//Input AVFormatContext and Output AVFormatContext
AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
AVPacket pkt;
const char *in_filename, *out_filename;
int ret, i;
int videoindex = -1;
int frame_index = 0;
int64_t start_time = 0;
//in_filename = "cuc_ieschool.mov";
//in_filename = "cuc_ieschool.mkv";
//in_filename = "cuc_ieschool.ts";
//in_filename = "cuc_ieschool.mp4";
//in_filename = "cuc_ieschool.h264";
in_filename = "rtsp://admin:Admin@123@192.168.1.81:554/Streaming/Channels/101?transportmode=unicast&profile=Profile_1";//输入URL(Input file URL)
//in_filename = "shanghai03_p.h264";
out_filename = "rtsp://localhost/publishlive/livestream";//输出 URL(Output URL)[RTMP]
//out_filename = "rtp://233.233.233.233:6666";//输出 URL(Output URL)[UDP]
av_register_all();
//Network
avformat_network_init();
//Input
if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0)
{
printf("Could not open input file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0)
{
printf("Failed to retrieve input stream information");
goto end;
}
for (i = 0; i < ifmt_ctx->nb_streams; i++)
{
if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
}
av_dump_format(ifmt_ctx, 0, in_filename, 0);
//Output
ret = avformat_alloc_output_context2(&ofmt_ctx, NULL, "rtsp", out_filename); // RTSP
//avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename); // UDP
if (ret < 0)
{
char strErr[STR_LENGTH_256];
av_strerror(ret, strErr, STR_LENGTH_256); // ret is already a negative AVERROR code
CommonGlobalUlti::UltiFunctions::WriteRuntimeLogs("avformat_alloc_output_context2() " + gcnew String(strErr));
goto end;
}
if (!ofmt_ctx)
{
printf("Could not create output context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
ofmt = ofmt_ctx->oformat;
for (i = 0; i < ifmt_ctx->nb_streams; i++)
{
//Create output AVStream according to input AVStream
AVStream *in_stream = ifmt_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
if (!out_stream)
{
printf("Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
//Copy the settings of AVCodecContext
ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
if (ret < 0)
{
printf("Failed to copy context from input to output stream codec context\n");
goto end;
}
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
//Dump Format------------------
av_dump_format(ofmt_ctx, 0, out_filename, 1);
//Open output URL
if (!(ofmt->flags & AVFMT_NOFILE))
{
ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
if (ret < 0)
{
char strErr[STR_LENGTH_256];
av_strerror(ret, strErr, STR_LENGTH_256);
CommonGlobalUlti::UltiFunctions::WriteRuntimeLogs("avio_open() " + gcnew String(strErr));
printf("Could not open output URL '%s'", out_filename);
goto end;
}
}
//Write file header
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0)
{
char strErr[STR_LENGTH_256];
av_strerror(ret, strErr, STR_LENGTH_256);
CommonGlobalUlti::UltiFunctions::WriteRuntimeLogs("avformat_write_header() " + gcnew String(strErr));
printf("Error occurred when opening output URL\n");
goto end;
}
start_time = av_gettime();
while (1)
{
AVStream *in_stream, *out_stream;
//Get an AVPacket
ret = av_read_frame(ifmt_ctx, &pkt);
if (ret < 0)
break;
//FIX:No PTS (Example: Raw H.264)
//Simple Write PTS
if (pkt.pts == AV_NOPTS_VALUE)
{
//Write PTS
AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
//Duration between 2 frames (us)
int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
//Parameters
pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts = pkt.pts;
pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
}
//Important: Delay => no need to sleep for a camera stream, only for a file
if (pkt.stream_index == videoindex)
{
AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
AVRational time_base_q = { 1,AV_TIME_BASE };
int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
in_stream = ifmt_ctx->streams[pkt.stream_index];
out_stream = ofmt_ctx->streams[pkt.stream_index];
/* copy packet */
//Convert PTS/DTS
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
//Print to Screen
if (pkt.stream_index == videoindex)
{
printf("Send %8d video frames to output URL\n", frame_index);
frame_index++;
}
//ret = av_write_frame(ofmt_ctx, &pkt);
ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
if (ret < 0)
{
printf("Error muxing packet\n");
break;
}
av_free_packet(&pkt);
} //End while (1)
//Write file trailer
av_write_trailer(ofmt_ctx);
end:
avformat_close_input(&ifmt_ctx);
/* close output */
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
if (ret < 0 && ret != AVERROR_EOF) {
printf("Error occurred.\n");
return -1;
}
return 0;
}
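For reference, FFmpeg error codes are negative errno values wrapped by AVERROR(), so -22 is AVERROR(EINVAL), "Invalid argument", on platforms where EINVAL is 22; with the rtsp muxer this typically points at stream parameters the muxer will not accept. A minimal standalone sketch of turning the return code into a readable message with av_strerror() (the code must be passed as-is, without wrapping it in AVERROR() a second time, which would flip the sign):

#include <libavutil/error.h>
#include <stdio.h>

int main(void)
{
    char errbuf[AV_ERROR_MAX_STRING_SIZE];
    // av_strerror() expects the negative AVERROR code itself.
    if (av_strerror(-22, errbuf, sizeof(errbuf)) == 0)
        printf("avformat_write_header: %s\n", errbuf); // prints "Invalid argument"
    return 0;
}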