Advanced search

Media (1)

Word: - Tags -/wave

Other articles (15)

  • Creating farms of unique websites

    13 April 2011

    MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
    This allows (among other things):
    - implementation costs to be shared between several different projects / individuals
    - rapid deployment of multiple unique sites
    - creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)

  • Permissions overridden by plugins

    27 April 2010

    MediaSPIP core:
    autoriser_auteur_modifier() so that visitors are able to edit their own information on the authors page

  • Managing creation and editing rights for objects

    8 February 2011

    By default, many features are restricted to administrators, but the minimum status required to use each of them can be configured independently, notably:
    - writing content on the site, configurable in the form-template management;
    - adding notes to articles;
    - adding captions and annotations to images;

On other sites (3910)

  • Android FFmpeg Video Recording Delete Last Recorded Part

    17 April 2015, by user3587194

    I’m trying to do exactly what this picture shows.

    Anyway, how can I delete part of a video? The code I was testing is on GitHub.

    It uses a progress bar: while you record, the bar advances, and each take is kept as a separate segment. What confuses me is where and how to grab each segment so I can decide whether or not to delete it.

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {

       long frameTimeStamp = 0L;
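       // The if/else chain below chooses this frame's timestamp: prefer the
       // audio clock when one is available so audio and video stay in sync,
       // falling back to wall-clock time before the first audio sample arrives.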

       if (mAudioTimestamp == 0L && firstTime > 0L)
           frameTimeStamp = 1000L * (System.currentTimeMillis() - firstTime);

       else if (mLastAudioTimestamp == mAudioTimestamp)
           frameTimeStamp = mAudioTimestamp + frameTime;

       else {
           long l2 = (System.nanoTime() - mAudioTimeRecorded) / 1000L;
           frameTimeStamp = l2 + mAudioTimestamp;
           mLastAudioTimestamp = mAudioTimestamp;
       }

       synchronized (mVideoRecordLock) {
           //Log.e("recorder", "mVideoRecordLock " + mVideoRecordLock);

           if (recording && rec && lastSavedframe != null && lastSavedframe.getFrameBytesData() != null && yuvIplImage != null) {

               if (isFirstFrame) {
                   isFirstFrame = false;
                   firstData = data;
               }

               // NB: (long) (1.0 / (double) frameRate) truncates to 0 for any
               // frameRate > 1, so the final term subtracts nothing here.
               totalTime = System.currentTimeMillis() - firstTime - pausedTime - ((long) (1.0 / (double) frameRate) * 1000);

               if (lastSavedframe != null && !deleteEnabled) {
                   deleteEnabled = true;
                   deleteBtn.setVisibility(View.VISIBLE);
                   cancelBtn.setVisibility(View.GONE);
               }

               if (!nextEnabled && totalTime >= recordingChangeTime) {
                   Log.e("recording", "totalTime >= recordingChangeTime " + totalTime + " " + recordingChangeTime);
                   nextEnabled = true;
                   nextBtn.setVisibility(View.VISIBLE);
               }

               if (nextEnabled && totalTime >= recordingMinimumTime) {
                   mHandler.sendEmptyMessage(5);
               }

               if (currentRecorderState == RecorderState.PRESS && totalTime >= recordingChangeTime) {
                   currentRecorderState = RecorderState.LOOSEN;
                   mHandler.sendEmptyMessage(2);
               }              
               mVideoTimestamp += frameTime;

               if (lastSavedframe.getTimeStamp() > mVideoTimestamp)
                   mVideoTimestamp = lastSavedframe.getTimeStamp();

               try {
                   yuvIplImage.getByteBuffer().put(lastSavedframe.getFrameBytesData());
                   videoRecorder.setTimestamp(lastSavedframe.getTimeStamp());
                   videoRecorder.record(yuvIplImage);

               } catch (com.googlecode.javacv.FrameRecorder.Exception e) {
                       e.printStackTrace();
               }

           }
           byte[] tempData = rotateYUV420Degree90(data, previewWidth, previewHeight);

           if (cameraSelection == 1)
               tempData = rotateYUV420Degree270(data, previewWidth, previewHeight);

           lastSavedframe = new SavedFrames(tempData, frameTimeStamp);
           //Log.e("recorder", "lastSavedframe " + lastSavedframe);
       }
    }

    public class Util {

    public static ContentValues videoContentValues = null;

    public static String getRecordingTimeFromMillis(long millis) {

       String strRecordingTime = null;

       int seconds = (int) (millis / 1000);
       int minutes = seconds / 60;
       int hours = minutes / 60;

       if (hours >= 0 && hours < 10)
           strRecordingTime = "0" + hours + ":";
       else
           strRecordingTime = hours + ":";

       if (hours > 0)
           minutes = minutes % 60;

       if (minutes >= 0 && minutes < 10)
           strRecordingTime += "0" + minutes + ":";
       else
           strRecordingTime += minutes + ":";

       seconds = seconds % 60;

       if (seconds >= 0 && seconds < 10)
           strRecordingTime += "0" + seconds ;
       else
           strRecordingTime += seconds ;

       return strRecordingTime;
    }

    public static int determineDisplayOrientation(Activity activity, int defaultCameraId) {

       int displayOrientation = 0;

       if (Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO) {
           CameraInfo cameraInfo = new CameraInfo();
           Camera.getCameraInfo(defaultCameraId, cameraInfo);

           int degrees  = getRotationAngle(activity);

           if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
               displayOrientation = (cameraInfo.orientation + degrees) % 360;
               displayOrientation = (360 - displayOrientation) % 360;

           } else {
               displayOrientation = (cameraInfo.orientation - degrees + 360) % 360;
           }
       }
       return displayOrientation;
    }
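
    // Typical use (sketch): feed the result straight to the camera so the
    // preview matches the device orientation, e.g.
    //     camera.setDisplayOrientation(Util.determineDisplayOrientation(activity, cameraId));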

    public static int getRotationAngle(Activity activity) {

       int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
       int degrees  = 0;

       switch (rotation) {
       case Surface.ROTATION_0:
           degrees = 0;
           break;

       case Surface.ROTATION_90:
           degrees = 90;
           break;

       case Surface.ROTATION_180:
           degrees = 180;
           break;

       case Surface.ROTATION_270:
           degrees = 270;
           break;
       }
       return degrees;
    }

    public static int getRotationAngle(int rotation) {

       int degrees  = 0;

       switch (rotation) {
       case Surface.ROTATION_0:
           degrees = 0;
           break;

       case Surface.ROTATION_90:
           degrees = 90;
           break;

       case Surface.ROTATION_180:
           degrees = 180;
           break;

       case Surface.ROTATION_270:
           degrees = 270;
           break;
       }
       return degrees;
    }

    public static String createImagePath(Context context){
       long dateTaken = System.currentTimeMillis();
       String title = Constants.FILE_START_NAME + dateTaken;
       String filename = title + Constants.IMAGE_EXTENSION;

       String dirPath = Environment.getExternalStorageDirectory()+"/Android/data/" + context.getPackageName()+"/video";
       File file = new File(dirPath);

       if(!file.exists() || !file.isDirectory())
           file.mkdirs();

       String filePath = dirPath + "/" + filename;
       return filePath;
    }

    public static String createFinalPath(Context context) {
       Log.e("util", "createFinalPath");
       long dateTaken = System.currentTimeMillis();
       String title = Constants.FILE_START_NAME + dateTaken;
       String filename = title + Constants.VIDEO_EXTENSION;
       String filePath = genrateFilePath(context, String.valueOf(dateTaken), true, null);

       ContentValues values = new ContentValues(7);
       values.put(Video.Media.TITLE, title);
       values.put(Video.Media.DISPLAY_NAME, filename);
       values.put(Video.Media.DATE_TAKEN, dateTaken);
       values.put(Video.Media.MIME_TYPE, "video/3gpp");
       values.put(Video.Media.DATA, filePath);
       videoContentValues = values;

       Log.e("util", "filePath " + filePath);
       return filePath;
    }

    public static void deleteTempVideo(Context context) {
       final String filePath = Environment.getExternalStorageDirectory() + "/Android/data/" + context.getPackageName() + "/video";
       new Thread(new Runnable() {

           @Override
           public void run() {
               File file = new File(filePath);
               if (file != null && file.isDirectory()) {
                   Log.e("util", "file.isDirectory() " + file.isDirectory());
                   for (File file2 : file.listFiles()) {
                       Log.e("util", "file.listFiles() " + file.listFiles());
                       file2.delete();
                   }
               }
           }
       }).start();
    }

    private static String genrateFilePath(Context context,String uniqueId, boolean isFinalPath, File tempFolderPath) {
       String fileName = Constants.FILE_START_NAME + uniqueId + Constants.VIDEO_EXTENSION;
       String dirPath = Environment.getExternalStorageDirectory() + "/Android/data/" + context.getPackageName() + "/video";

       if (isFinalPath) {
           File file = new File(dirPath);
           if (!file.exists() || !file.isDirectory())
               file.mkdirs();
       } else
           dirPath = tempFolderPath.getAbsolutePath();
       String filePath = dirPath + "/" + fileName;
       return filePath;
    }

    public static String createTempPath(Context context, File tempFolderPath ) {
       long dateTaken = System.currentTimeMillis();
       String filePath = genrateFilePath(context,String.valueOf(dateTaken), false, tempFolderPath);
       return filePath;
    }

    public static File getTempFolderPath() {
       File tempFolder = new File(Constants.TEMP_FOLDER_PATH +"_" +System.currentTimeMillis());
       return tempFolder;
    }

    public static List<Camera.Size> getResolutionList(Camera camera) {
       Parameters parameters = camera.getParameters();
       List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
       return previewSizes;
    }

    public static RecorderParameters getRecorderParameter(int currentResolution) {
       RecorderParameters parameters = new RecorderParameters();
       if (currentResolution ==  Constants.RESOLUTION_HIGH_VALUE) {
           parameters.setAudioBitrate(128000);
           parameters.setVideoQuality(0);

       } else if (currentResolution ==  Constants.RESOLUTION_MEDIUM_VALUE) {
           parameters.setAudioBitrate(128000);
           parameters.setVideoQuality(5);

       } else if (currentResolution == Constants.RESOLUTION_LOW_VALUE) {
           parameters.setAudioBitrate(96000);
           parameters.setVideoQuality(20);
       }
       return parameters;
    }

    public static int calculateMargin(int previewWidth, int screenWidth) {

       int margin = 0;

       if (previewWidth <= Constants.RESOLUTION_LOW) {
           margin = (int) (screenWidth*0.12);

       } else if (previewWidth > Constants.RESOLUTION_LOW && previewWidth <= Constants.RESOLUTION_MEDIUM) {
           margin = (int) (screenWidth*0.08);

       } else if (previewWidth > Constants.RESOLUTION_MEDIUM && previewWidth <= Constants.RESOLUTION_HIGH) {
           margin = (int) (screenWidth*0.08);
       }
       return margin;
    }

    public static int setSelectedResolution(int previewHeight) {

       int selectedResolution = 0;

       if(previewHeight <= Constants.RESOLUTION_LOW) {
           selectedResolution = 0;

       } else if (previewHeight > Constants.RESOLUTION_LOW && previewHeight <= Constants.RESOLUTION_MEDIUM) {
           selectedResolution = 1;

       } else if (previewHeight > Constants.RESOLUTION_MEDIUM && previewHeight <= Constants.RESOLUTION_HIGH) {
           selectedResolution = 2;
       }
       return selectedResolution;
    }

    public static class ResolutionComparator implements Comparator<Camera.Size> {

       @Override
       public int compare(Camera.Size size1, Camera.Size size2) {

           if(size1.height != size2.height)
               return size1.height -size2.height;
           else
               return size1.width - size2.width;
       }
    }


    public static void concatenateMultipleFiles(String inpath, String outpath)
    {
       File Folder = new File(inpath);
       File files[];
       files = Folder.listFiles();

       if (files.length > 0)
       {
           for (int i = 0; i < files.length; i++)
           {
               // The loop body was lost when the original page swallowed
               // everything after the '<' of the loop condition as an HTML
               // tag; the surviving fragment "libencoding.so" suggests the
               // missing code referenced the bundled native encoding library.
           }
       }
    }

    private static HashMap<String, String> getMetaData()
    {
       HashMap<String, String> localHashMap = new HashMap<String, String>();
       localHashMap.put("creation_time", new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSSZ").format(new Date()));
       return localHashMap;
    }

    public static int getTimeStampInNsFromSampleCounted(int paramInt) {
       // samples / 0.0441 == samples * 1_000_000 / 44_100: converts a sample
       // count at 44.1 kHz into microseconds (despite the "Ns" in the name).
       return (int)(paramInt / 0.0441D);
    }

    /*public static void saveReceivedFrame(SavedFrames frame) {

       File cachePath = new File(frame.getCachePath());
       BufferedOutputStream bos;

       try {
           bos = new BufferedOutputStream(new FileOutputStream(cachePath));
           if (bos != null) {
               bos.write(frame.getFrameBytesData());
               bos.flush();
               bos.close();
           }

       } catch (FileNotFoundException e) {
           e.printStackTrace();
           cachePath = null;

       } catch (IOException e) {
           e.printStackTrace();
           cachePath = null;
       }
    }*/

    public static Toast showToast(Context context, String textMessage, int timeDuration) {

       if (null == context) {
           return null;
       }

       textMessage = (null == textMessage ? "Oops! " : textMessage.trim());
       Toast t = Toast.makeText(context, textMessage, timeDuration);
       t.show();
       return t;
    }

    public static void showDialog(Context context, String title, String content, int type, final Handler handler) {
       final Dialog dialog = new Dialog(context, R.style.Dialog_loading);
       dialog.setCancelable(true);

       LayoutInflater inflater = LayoutInflater.from(context);
       View view = inflater.inflate(R.layout.global_dialog_tpl, null);

       Button confirmButton = (Button) view.findViewById(R.id.setting_account_bind_confirm);
       Button cancelButton = (Button) view.findViewById(R.id.setting_account_bind_cancel);

       TextView dialogTitle = (TextView) view.findViewById(R.id.global_dialog_title);

       View line_hori_center = view.findViewById(R.id.line_hori_center);
       confirmButton.setVisibility(View.GONE);
       line_hori_center.setVisibility(View.GONE);
       TextView textView = (TextView) view.findViewById(R.id.setting_account_bind_text);

       Window dialogWindow = dialog.getWindow();
       WindowManager.LayoutParams lp = dialogWindow.getAttributes();
       lp.width = (int) (context.getResources().getDisplayMetrics().density*288);
       dialogWindow.setAttributes(lp);

       if(type != 1 && type != 2){
           type = 1;
       }
       dialogTitle.setText(title);
       textView.setText(content);

       if(type == 1 || type == 2){
           confirmButton.setVisibility(View.VISIBLE);
           confirmButton.setOnClickListener(new OnClickListener(){
               @Override
               public void onClick(View v){
                   if(handler != null){
                       Message msg = handler.obtainMessage();
                       msg.what = 1;
                       handler.sendMessage(msg);
                   }
                   dialog.dismiss();
               }
           });
       }
       // Cancel button handler
       if(type == 2){
           cancelButton.setVisibility(View.VISIBLE);
           line_hori_center.setVisibility(View.VISIBLE);
           cancelButton.setOnClickListener(new OnClickListener(){
               @Override
               public void onClick(View v){
                   if(handler != null){
                       Message msg = handler.obtainMessage();
                       msg.what = 0;
                       handler.sendMessage(msg);
                   }
                   dialog.dismiss();
               }
           });
       }
       dialog.addContentView(view, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
       dialog.setCancelable(true);             // close on back-key press
       dialog.setCanceledOnTouchOutside(true); // close on touch outside the dialog
       dialog.show();
    }

    public IplImage getFrame(String filePath) {
       Log.e("util", "getFrame" + filePath);
       CvCapture capture = cvCreateFileCapture(filePath);
       Log.e("util", "capture " + capture);
       IplImage image = cvQueryFrame(capture);
       Log.e("util", "image " + image);
       return image;
    }
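
    One common way to get this per-segment delete behaviour (and the Util class
    above already has the building blocks: createTempPath(), createFinalPath()
    and concatenateMultipleFiles()) is to record every press-and-hold into its
    own temp file and keep the paths on a stack: deleting the last part is then
    just popping the stack and removing that file, and the final video is the
    concatenation of whatever survived. A minimal sketch of the idea --
    SegmentManager, startRecorder() and stopRecorder() are hypothetical
    stand-ins for whatever opens and closes the FFmpegFrameRecorder on a given
    path:

    import android.content.Context;
    import java.io.File;
    import java.util.ArrayDeque;
    import java.util.Deque;

    public class SegmentManager {

       // Paths of the parts recorded so far, most recent on top.
       private final Deque<File> segments = new ArrayDeque<File>();

       // Record button pressed: start a new recorder on a fresh temp file.
       public void onRecordPressed(Context context, File tempFolder) {
           File segment = new File(Util.createTempPath(context, tempFolder));
           segments.push(segment);
           startRecorder(segment);   // hypothetical: open the recorder on this path
       }

       // Record button released: close the current part.
       public void onRecordReleased() {
           stopRecorder();           // hypothetical: stop/release the recorder
       }

       // Delete button: drop only the last recorded part (the progress bar
       // should shrink by that segment's duration).
       public void onDeleteLast() {
           if (!segments.isEmpty()) {
               segments.pop().delete();
           }
       }

       // Next button: merge the surviving parts into the final video.
       public void onFinish(Context context, File tempFolder) {
           Util.concatenateMultipleFiles(tempFolder.getAbsolutePath(),
                   Util.createFinalPath(context));
       }

       private void startRecorder(File path) { /* open FFmpegFrameRecorder here */ }
       private void stopRecorder() { /* stop and release the recorder here */ }
    }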

  • ffmpeg and boost::asio NULL pointer

    9 April 2015, by Georgi

    I am trying to build a specialised piece of video software that will run on multi-core machines.

    I want many C++ objects streaming video files, and many other C++ objects storing the streamed data to files.

    I have created some simple classes, but when I try to create 2 or more objects I get:

    opening stream9079.sdp
    [udp @ 0xaef5380] bind failed: Address already in use
    Could not open input file stream9079.sdp
    Segmentation fault (core dumped)

    When I use only one object everything is fine.

    I use the following code

    int main(int argc, char **argv)
    {
       boost::asio::io_service ios;
       boost::asio::io_service ios1;

       Channel *channels[100];

       channels[0] = new Channel(ios, 9078, atoi(argv[1]));
       channels[0]->StartTimer(0);

       channels[1] = new Channel(ios1, 9079, atoi(argv[1]));
       channels[1]->StartTimer(0);
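
       // (Aside: RTP conventionally uses an even port with RTCP on the next
       // odd one, so a session on 9078 will normally bind 9079 as well; a
       // second channel asking for 9079 then fails with "Address already in
       // use", matching the log above. This is an inference from the port
       // numbers, not something verified against this code.)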

       boost::thread t(boost::bind(&worker, &ios));
       boost::thread t1(boost::bind(&worker, &ios1));


       t.join();
       t1.join();

       CEVLOG_MSG << "done" << std::endl;

       return 0;
    }

    My Channel class implementation is:

    #include "channel.hpp"
    #include "utils.hpp"
    #include "boost/lexical_cast.hpp"
    Channel::Channel(boost::asio::io_service &ioP, int i, bool to_send):
       Runnable(ioP),
       work( new boost::asio::io_service::work(ioP) ),
       ofmt(NULL),
       ifmt_ctx(NULL),
       ofmt_ctx(NULL)
    {
       id = i;
       sender = to_send;

       if (sender)
       {
               input.assign("/home/georgi/Downloads/video/IMG_0019.MOV");
               output.assign("rtp://10.101.3.60:"); output += boost::lexical_cast(id);
       }
       else
       {
               input.assign("stream"); input += boost::lexical_cast(id); input += ".sdp";
               output.assign("test"); output += boost::lexical_cast(id); output += ".mp4";
       }

       video_idx = audio_idx = sub_idx = -1;

       if (OpenInput())
       {
               if (sender)
                       OpenOutput(eStreamOutput);
               else
                       OpenOutput(eFileOutput);
       }
    }

    Channel::~Channel()
    {
       av_write_trailer(ofmt_ctx);

       avformat_close_input(&ifmt_ctx);

       if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
               avio_closep(&ofmt_ctx->pb);

       avformat_free_context(ofmt_ctx);

       work.reset();
    }

    bool Channel::OpenInput()
    {
       CEVLOG_MSG << "opening " << input << std::endl;

       int ret;
       if ((ret = avformat_open_input(&ifmt_ctx, input.c_str(), 0, 0)) < 0)
       {
               CEVLOG_ERR << "Could not open input file " << input << std::endl;
               return false;
       }

       CEVLOG_MSG << " " << ifmt_ctx << std::endl;

       if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0)
       {
               CEVLOG_ERR << "Failed to retrieve input stream information" << std::endl;
               return false;
       }

       ifmt_ctx->flags |= AVFMT_FLAG_GENPTS;

       //read and set timestamps to 0
       av_read_frame(ifmt_ctx, &pkt);
       pkt.pts = pkt.dts = 0;

       return true;
    }

    bool Channel::OpenOutput(tOutputType WhatToOpen)
    {
       int SDP_size;

       switch (WhatToOpen)
       {
       case eFileOutput:
               avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, output.c_str());
               break;

       case eStreamOutput:
               avformat_alloc_output_context2(&ofmt_ctx, NULL, "rtp", output.c_str());

               char SDP[4096];
               SDP_size = 4096;

               av_sdp_create(&ofmt_ctx, 1, SDP, SDP_size);
               CEVLOG_DBG << "SDP=" << SDP << std::endl;
               break;

       default:
               assert(false);
               break;
       }

       if (!ofmt_ctx)
       {
               CEVLOG_ERR << "Could not create output context" << std::endl;
               return false;
       }

       ofmt = ofmt_ctx->oformat;

       video_idx = FindIndex(AVMEDIA_TYPE_VIDEO);

       if (!(ofmt->flags & AVFMT_NOFILE))
       {
               if (avio_open(&ofmt_ctx->pb, output.c_str(), AVIO_FLAG_WRITE) < 0)
               {
                       CEVLOG_ERR << "Could not open output file " << output << std::endl;
                       return false;
               }
       }

       if (avformat_write_header(ofmt_ctx, NULL) < 0)
       {
               CEVLOG_ERR << "Error occurred when opening output file " << output << std::endl;
               return false;
       }

       return true;
    }

    unsigned int Channel::FindIndex(AVMediaType Type)
    {
       int idx;

       for (idx = 0; idx < ifmt_ctx->nb_streams; idx++)
       {
               if (ifmt_ctx->streams[idx]->codec->codec_type == Type)
               {
                       AVStream *in_stream = ifmt_ctx->streams[idx];
                       AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);

                       if (!out_stream)
                       {
                               CEVLOG_ERR << "Failed allocating output stream" << std::endl;
                               break;
                       }

                       if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0)
                       {
                               CEVLOG_ERR << "Failed to copy context from input to output stream codec context" << std::endl;
                               break;
                       }

                       out_stream->codec->codec_tag = 0;
                       if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
                       {
                               out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
                       }

                       break;
               }
       }

       return idx;
    }

    void Channel::Callback()
    {
       if (sender)
               SendVideo();
       else
               RecvVideo();
    }

    void Channel::SendVideo()
    {
       int ret = av_read_frame(ifmt_ctx, &pkt);
       int time_ms = 0;

       if (ret != 0)
       {
               av_write_trailer(ofmt_ctx);
               work.reset();
               return;
       }

       if (pkt.stream_index == video_idx)
       {
               AVStream *in_stream  = ifmt_ctx->streams[pkt.stream_index];
               AVStream *out_stream = ofmt_ctx->streams[pkt.stream_index];

               AVRational time_base = ifmt_ctx->streams[video_idx]->time_base;

               char timestamp[100];
               time_ms = 1000 * 1000 * strtof(timestamp2char(timestamp, pkt.duration, &time_base), NULL);

               pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
               pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
               pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
               pkt.pos = -1;

               ret = av_interleaved_write_frame(ofmt_ctx, &pkt);

               if (ret < 0)
               {
                       CEVLOG_ERR << "Error muxing packet" << std::endl;
                       return;
               }
       }

       av_free_packet(&pkt);

       StartTimer(time_ms);
    }

    void Channel::RecvVideo()
    {
       int ret = av_read_frame(ifmt_ctx, &pkt);

       if (ret != 0)
       {
               //Some error or end of stream is detected. Write file trailer
               av_write_trailer(ofmt_ctx);
               work.reset();
               return;
       }

       //if it is NOT video, just keep reading
       if (pkt.stream_index == video_idx)
       {
               AVStream *in_stream  = ifmt_ctx->streams[pkt.stream_index];
               AVStream *out_stream = ofmt_ctx->streams[pkt.stream_index];

               AVRational time_base = ifmt_ctx->streams[video_idx]->time_base;

               pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
               pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
               pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
               pkt.pos = -1;

               ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
               if (ret < 0)
               {
                       CEVLOG_ERR << "Error muxing packet" << std::endl;
                       return;
               }
       }

       av_free_packet(&pkt);

       StartTimer(0);
    }

  • Revision 21939: emoji handling with MySQL

    14 March 2015, by Fil Up

    MySQL's 'utf8' charset does not cover the whole of UTF-8, only the characters encoded on 1, 2 or 3 bytes. One workaround would be to adopt the 'utf8mb4' charset, but that is hard to do and means reworking the database structure (notably, VARCHAR 255 columns no longer fit, since 255 * 4 > 1000 bytes, the index key limit);

    the solution adopted here is instead to escape the 4-byte characters using their Unicode form (...)
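
    Put differently: rather than migrating the tables to utf8mb4, every code
    point above U+FFFF (exactly the characters UTF-8 encodes on 4 bytes, emoji
    included) is turned into an escape before it reaches the database, and
    turned back on output. A rough sketch of the idea in Java; the
    numeric-entity escape format is an assumption here, not necessarily the
    exact form this revision uses:

    public class EmojiEscaper {

       // Replace every code point outside the BMP (> U+FFFF) -- exactly the
       // ones MySQL's 3-byte 'utf8' charset cannot store -- with a numeric
       // escape, leaving everything else untouched.
       public static String escapeSupplementary(String s) {
           StringBuilder out = new StringBuilder(s.length());
           s.codePoints().forEach(cp -> {
               if (cp > 0xFFFF) {
                   out.append("&#").append(cp).append(';');   // e.g. U+1F600 -> &#128512;
               } else {
                   out.appendCodePoint(cp);
               }
           });
           return out.toString();
       }
    }

    escapeSupplementary("ok \uD83D\uDE00") returns "ok &#128512;", which is
    plain ASCII and therefore fits a 3-byte-utf8 column; VARCHAR 255 columns
    and their indexes can stay exactly as they are.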