
Other articles (21)

  • MediaSPIP Core: Configuration

    9 November 2010, by

    MediaSPIP Core provides three different configuration pages by default (these pages rely on the CFG configuration plugin): a page for the general configuration of the templates (squelettes); a page for configuring the site's home page; a page for configuring the sections;
    It also provides an additional page, shown only when certain plugins are enabled, for controlling their specific display options and features (...)

  • Use, discuss, criticize

    13 April 2011, by

    Talk to people directly involved in MediaSPIP’s development, or to people around you who could use MediaSPIP to share, enhance or develop their creative projects.
    The bigger the community, the more MediaSPIP’s potential will be explored and the faster the software will evolve.
    A discussion list is available for all exchanges between users.

  • Emballe médias: what is it for?

    4 February 2011, by

    This plugin is designed to manage sites for publishing documents of all types.
    It creates "médias": a "média" is an article in the SPIP sense, created automatically when a document is uploaded, whether audio, video, image or text; only a single document can be linked to a so-called "média" article;

On other sites (2866)

  • Accurately cut video using ffmpeg

    26 September 2016, by Santhosh Yedidi

    I tried to cut a video with millisecond precision using ffmpeg, but failed. The only way I can do it is with Avidemux: I open the file, select the start and end points, choose the MJPEG video codec and the AAC audio codec, and it cuts exactly. I think that because it decodes the file to frames when it opens it, it can cut exactly.

    I have an mp4 file. I can't always open it in Avidemux (and I don't want to rely on a graphical interface). What is the best way to cut with millisecond precision (e.g. from 00:00:48.213 to 00:00:48.648)?

    I am OK with converting it to any format and any size, as long as the timing stays precise.
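
    A re-encoding cut is the usual way to get millisecond accuracy with ffmpeg alone. As a sketch (the file names and codecs here are only placeholders), putting -ss/-to after the input makes ffmpeg decode from the start and drop frames before the cut point, so the boundaries are frame-accurate:

    ffmpeg -i input.mp4 -ss 00:00:48.213 -to 00:00:48.648 -c:v libx264 -c:a aac cut.mp4

    Putting -ss before -i together with -c copy is much faster, but a stream copy can only start on a keyframe, which is why copy-mode cuts are not millisecond-precise.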

  • Anomalie #3809: Impossible to write "javascript :" or "data :" in the text of an article

    21 July 2016, by Eric Beliveau

    No, I have not enabled that feature. I will check again when the next version comes out, since you do not have this problem with the svn version.

    One small clarification: I noticed the problem while editing articles that were already online; I have not tested with a new article, so it may behave differently.

  • How to change the clarity of an h264 video decoded by FFmpeg and rendered by OpenGL

    5 June 2016, by S.Jin

    I'm writing a movie player that uses FFmpeg and OpenGL ES. The movie decodes successfully, but when I use the AVFrame as a texture to draw on screen, the result is fuzzy. I don't know what is wrong in my code. If I convert the AVFrame from YUV to an RGB image, it is clear.
    Does anyone know why drawing with the YUV planes as textures is not clear?

    My render code:

    #import "SJGLView.h"
    #import <GLKit/GLKit.h>
    #import "SJDecoder.h"

    #include "libavutil/pixfmt.h"

    // MARK: - C Function
    static void sj_logShaderError(GLuint shader) {
     GLint info_len = 0;
     glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len);
     if (info_len == 0) NSLog(@"Empty info");
     else {
       GLchar *log = (GLchar *)malloc(info_len);
       glGetShaderInfoLog(shader, info_len, &info_len, log);
       NSLog(@"Shader compile log: %s", log);
       free(log);   // release the log buffer once it has been printed
     }
    }

    static void sj_logProgramError(GLuint program) {
     int info_length;
     glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_length);
     if (info_length) {
       GLchar *log = (GLchar *)malloc(info_length);
       glGetProgramInfoLog(program, info_length, &info_length, log);
       NSLog(@"Program link log: %s", log);
       free(log);   // release the log buffer once it has been printed
     }
    }

    GLuint sj_loadShader(GLenum shader_type, const char* shader_source) {
     GLuint shader = glCreateShader(shader_type);
     glShaderSource(shader, 1, &shader_source, NULL);
     glCompileShader(shader);
     GLint compile_status = 0;
     glGetShaderiv(shader, GL_COMPILE_STATUS, &compile_status);
     if (!compile_status) goto fail;
     return shader;

    fail:
     if (shader) {
       sj_logShaderError(shader);
       glDeleteShader(shader);
     }
     return 0;
    }

    void loadOrtho(float *matrix, float left, float right, float bottom, float top, float near, float far) {
     float r_l = right - left;
     float t_b = top - bottom;
     float f_n = far - near;
     float tx = -(right + left)/(right - left);   // translation terms of a standard
     float ty = -(top + bottom)/(top - bottom);   // column-major orthographic projection,
     float tz = -(far + near)/(far - near);       // negated as in glOrtho

     matrix[0] = 2.0f / r_l;
     matrix[1] = 0.0f;
     matrix[2] = 0.0f;
     matrix[3] = 0.0f;

     matrix[4] = 0.0f;
     matrix[5] = 2.0f / t_b;
     matrix[6] = 0.0f;
     matrix[7] = 0.0f;

     matrix[8] = 0.0f;
     matrix[9] = 0.0f;
     matrix[10] = -2.0f / f_n;
     matrix[11] = 0.0f;

     matrix[12] = tx;
     matrix[13] = ty;
     matrix[14] = tz;
     matrix[15] = 1.0f;
    }
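    // Note: render: always calls loadOrtho(modelviewProj, -1, 1, -1, 1, -1, 1), so the
    // resulting matrix reduces to the identity apart from m[10] = -1 (a Z flip); the quad
    // set up in updateVertices is passed to clip space essentially unchanged.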


    // BT.709, standard for HDTV
    static const GLfloat g_bt709[] = {
     1.164,  1.164,  1.164,
     0.0,   -0.213,  2.112,
     1.793, -0.533,  0.0,
    };

    const GLfloat *getColorMatrix_bt709() {
     return g_bt709;
    }
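    // g_bt709 is uploaded with glUniformMatrix3fv(..., GL_FALSE, ...), which treats the
    // array as column-major, so each row of the initializer above becomes one *column*
    // of the GLSL mat3. "rgb = um3_ColorConversion * yuv" in the fragment shader then
    // expands to the usual video-range BT.709 equations:
    //   R = 1.164*Y'             + 1.793*Cr'
    //   G = 1.164*Y' - 0.213*Cb' - 0.533*Cr'
    //   B = 1.164*Y' + 2.112*Cb'
    // where Y' = Y - 16/255 and Cb'/Cr' are the chroma samples minus 0.5.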

    enum {
     ATTRIBUTE_VERTEX,
     ATTRIBUTE_TEXCOORD,
    };

    @implementation SJGLView {
     EAGLContext *_context;

     GLuint      _framebuffer;
     GLuint      _renderbuffer;
     GLint       _backingWidth;
     GLint       _backingHeight;
     GLfloat     _vertices[8];
     GLuint      _program;
     GLuint      _av4Position;
     GLuint      _av2Texcoord;
     GLuint      _um4Mvp;
     GLfloat     _texcoords[8];
     GLuint      _us2Sampler[3];
     GLuint      _um3ColorConversion;
     GLuint      _textures[3];

     SJDecoder *_decoder;
    }

    + (Class)layerClass {
     return [CAEAGLLayer class];
    }

    - (instancetype)initWithFrame:(CGRect)frame decoder:(SJDecoder *)decoder {
     self = [super initWithFrame:frame];
     if (self) {
       _decoder = decoder;
       [self setupGL];
     }
     return self;
    }

    - (void)layoutSubviews {
     glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
     [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
     glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
     glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);

     [self updateVertices];
     [self render: nil];
    }

    - (void)setContentMode:(UIViewContentMode)contentMode
    {
     [super setContentMode:contentMode];
     [self updateVertices];

     [self render:nil];
    }


    - (void)setupGL {
     _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
     NSAssert(_context != nil, @"Failed to init EAGLContext");

     CAEAGLLayer *eaglLayer= (CAEAGLLayer *)self.layer;
     eaglLayer.opaque = YES;
     eaglLayer.drawableProperties = @{
                                      kEAGLDrawablePropertyRetainedBacking: [NSNumber numberWithBool:YES],
                                      kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
                                      };

     [EAGLContext setCurrentContext:_context];
     if ([self setupEAGLContext]) {
       NSLog(@"Success to setup EAGLContext");
       if ([self loadShaders]) {
         NSLog(@"Success to load shader");
         _us2Sampler[0] = glGetUniformLocation(_program, "us2_SamplerX");
         _us2Sampler[1] = glGetUniformLocation(_program, "us2_SamplerY");
         _us2Sampler[2] = glGetUniformLocation(_program, "us2_SamplerZ");
         _um3ColorConversion = glGetUniformLocation(_program, "um3_ColorConversion");
       }
     }
    }

    - (BOOL)setupEAGLContext {
     glGenFramebuffers(1, &_framebuffer);
     glGenRenderbuffers(1, &_renderbuffer);
     glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
     glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
     [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
     glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
     glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
     glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderbuffer);

     GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
     if (status != GL_FRAMEBUFFER_COMPLETE) {
       NSLog(@"Failed to make complete framebuffer object: %x", status);
       return NO;
     }

     GLenum glError = glGetError();
     if (glError != GL_NO_ERROR) {
       NSLog(@"Failed to setup EAGLContext: %x", glError);
       return NO;
     }
     return YES;
    }

    - (BOOL)loadShaders {
     NSString *vertexPath = [[NSBundle mainBundle] pathForResource:@"vertex" ofType:@"vsh"];
     const char *vertexString = [[NSString stringWithContentsOfFile:vertexPath encoding:NSUTF8StringEncoding error:nil] UTF8String];

     NSString *fragmentPath = _decoder.format == SJVideoFrameFormatYUV ? [[NSBundle mainBundle] pathForResource:@"yuv420p" ofType:@"fsh"] :
     [[NSBundle mainBundle] pathForResource:@"rgb" ofType:@"fsh"];
     const char *fragmentString = [[NSString stringWithContentsOfFile:fragmentPath encoding:NSUTF8StringEncoding error:nil] UTF8String];

     GLuint vertexShader = sj_loadShader(GL_VERTEX_SHADER, vertexString);
     GLuint fragmentShader = sj_loadShader(GL_FRAGMENT_SHADER, fragmentString);

     _program = glCreateProgram();
     glAttachShader(_program, vertexShader);
     glAttachShader(_program, fragmentShader);
     glLinkProgram(_program);
     GLint link_status = GL_FALSE;
     glGetProgramiv(_program, GL_LINK_STATUS, &link_status);
     if(!link_status) goto fail;

     _av4Position = glGetAttribLocation(_program, "av4_Position");
     _av2Texcoord = glGetAttribLocation(_program, "av2_Texcoord");
     _um4Mvp = glGetUniformLocation(_program, "um4_ModelViewProjection");
     return YES;
    fail:
     sj_logProgramError(_program);
     glDeleteShader(vertexShader);
     glDeleteShader(fragmentShader);
     glDeleteProgram(_program);
     return NO;
    }

    - (void)useRenderer {
     glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
     glUseProgram(_program);

     if (0 == _textures[0]) glGenTextures(3, _textures);
     for (int i = 0; i < 3; i++) {
       glActiveTexture(GL_TEXTURE0 + i);
       glBindTexture(GL_TEXTURE_2D, _textures[i]);

       glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
       glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
       glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
       glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

       glUniform1i(_us2Sampler[i], i);
     }
     glUniformMatrix3fv(_um3ColorConversion, 1, GL_FALSE, getColorMatrix_bt709());
    }
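    // useRenderer binds the Y/U/V planes to texture units 0-2 (matching us2_SamplerX/Y/Z)
    // and re-uploads the colour matrix on every call. Filtering is GL_LINEAR here; the
    // GL_NEAREST screenshot at the end of the question presumably comes from swapping
    // these two filter parameters.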

    - (void)uploadTexture:(SJVideoFrame *)frame {
     if (frame.format == SJVideoFrameFormatYUV) {
       SJVideoYUVFrame *yuvFrame = (SJVideoYUVFrame *)frame;
       const GLubyte *pixel[3] = { yuvFrame.luma.bytes, yuvFrame.chromaB.bytes, yuvFrame.chromaR.bytes };
       const GLsizei widths[3] = { yuvFrame.width, yuvFrame.width/2, yuvFrame.width/2 };
       const GLsizei heights[3] = { yuvFrame.height, yuvFrame.height/2, yuvFrame.height/2 };
       for (int i = 0; i < 3; i++) {
         glBindTexture(GL_TEXTURE_2D, _textures[i]);
         glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widths[i], heights[i], 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, pixel[i]);
       }
     }

    }
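    // Note: each plane is uploaded with yuvFrame.width as the row length, so this assumes
    // the planes are tightly packed (AVFrame linesize == width). If the decoder returns
    // padded rows, they must be repacked before upload (GL_UNPACK_ROW_LENGTH only exists
    // on OpenGL ES 3.0), otherwise the picture comes out distorted.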

    - (void)render:(SJVideoFrame *)frame {
     [EAGLContext setCurrentContext:_context];
     glUseProgram(_program);
     [self useRenderer];
     GLfloat modelviewProj[16];
     loadOrtho(modelviewProj, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f);
     glUniformMatrix4fv(_um4Mvp, 1, GL_FALSE, modelviewProj);
     [self updateVertices];
     [self updateTexcoords];

     glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
     glViewport(0, 0, _backingWidth, _backingHeight);

     [self uploadTexture:frame];
     glClear(GL_COLOR_BUFFER_BIT);
     glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

     glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
     [_context presentRenderbuffer:GL_RENDERBUFFER];
    }

    - (void)updateVertices {
     [self resetVertices];

     BOOL fit        = (self.contentMode == UIViewContentModeScaleAspectFit);
     float width     = _decoder.frameWidth;
     float height    = _decoder.frameHeight;
     const float dW  = (float)_backingWidth / width;
     const float dH  = (float)_backingHeight / height;
     float dd        = fit ? MIN(dH, dW) : MAX(dH, dW);
     float nW        = (width  * dd / (float)_backingWidth);
     float nH        = (height * dd / (float)_backingHeight);

     _vertices[0] = -nW;
     _vertices[1] = -nH;
     _vertices[2] =  nW;
     _vertices[3] = -nH;
     _vertices[4] = -nW;
     _vertices[5] =  nH;
     _vertices[6] =  nW;
     _vertices[7] =  nH;

     glVertexAttribPointer(_av4Position, 2, GL_FLOAT, GL_FALSE, 0, _vertices);
     glEnableVertexAttribArray(_av4Position);
    }

    - (void)resetVertices {
     _vertices[0] = -1.0f;
     _vertices[1] = -1.0f;
     _vertices[2] =  1.0f;
     _vertices[3] = -1.0f;
     _vertices[4] = -1.0f;
     _vertices[5] =  1.0f;
     _vertices[6] =  1.0f;
     _vertices[7] =  1.0f;
    }

    - (void)updateTexcoords {
     [self resetTexcoords];

     glVertexAttribPointer(_av2Texcoord, 2, GL_FLOAT, GL_FALSE, 0, _texcoords);
     glEnableVertexAttribArray(_av2Texcoord);
    }

    - (void)resetTexcoords {
     _texcoords[0] = 0.0f;
     _texcoords[1] = 1.0f;
     _texcoords[2] = 1.0f;
     _texcoords[3] = 1.0f;
     _texcoords[4] = 0.0f;
     _texcoords[5] = 0.0f;
     _texcoords[6] = 1.0f;
     _texcoords[7] = 0.0f;
    }

    The .fsh file:

    precision highp float;
    varying   highp vec2 vv2_Texcoord;
    uniform   mat3  um3_ColorConversion;
    uniform   lowp  sampler2D us2_SamplerX;
    uniform   lowp  sampler2D us2_SamplerY;
    uniform   lowp  sampler2D us2_SamplerZ;
    void main() {
     mediump vec3 yuv;
     lowp    vec3 rgb;
     yuv.x = (texture2D(us2_SamplerX, vv2_Texcoord).r - (16.0/255.0));
     yuv.y = (texture2D(us2_SamplerY, vv2_Texcoord).r - 0.5);
     yuv.z = (texture2D(us2_SamplerZ, vv2_Texcoord).r - 0.5);
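      // the 16/255 and 0.5 offsets convert video-range Y'CbCr into the zero-based
      // form expected by the BT.709 matrix below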
      rgb = um3_ColorConversion * yuv;
     gl_FragColor = vec4(rgb, 1.0);
    }

    The .vsh file:

    precision highp float;
    varying highp vec2 vv2_Texcoord;
    uniform lowp sampler2D us2_SamplerX;

    void main() {
      gl_FragColor = vec4(texture2D(us2_SamplerX, vv2_Texcoord).rgb, 1.0);
    }

    The image rendered with the RGB fragment shader:

    (screenshot: RGB output, which looks clear)

    Update, adding an image rendered with GL_NEAREST filtering:

    (screenshot: GL_NEAREST output)