OpenGL播放yuv视频
2012-10-12 18:10
381 查看
http://www.cocoachina.com/bbs/simple/?t100908.html
// data points to one frame of planar YUV420p pixels laid out as
// Y (w*h bytes), then U (w/2 * h/2), then V (w/2 * h/2).
- (void)playVideoData:(void *)data
{
    [EAGLContext setCurrentContext:_glContext];

    // Lazily create the three single-channel plane textures on first use.
    if (!_textureY)
    {
        glGenTextures(1, &_textureY);
        glGenTextures(1, &_textureU);
        glGenTextures(1, &_textureV);
    }

    // YUV planes are tightly packed one-byte rows. The default unpack
    // alignment of 4 would skew any plane whose row width is not a
    // multiple of 4 — notably the half-width chroma planes.
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    // Arithmetic on void* is a GNU extension; use an explicit byte pointer.
    const uint8_t *bytes = (const uint8_t *)data;
    const size_t lumaSize = (size_t)_videoW * (size_t)_videoH;

    // Y plane: full resolution, one byte per texel in the red channel.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, _textureY);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW, _videoH, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // U plane: quarter size, starts right after the luma plane.
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, _textureU);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW / 2, _videoH / 2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes + lumaSize);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // V plane: quarter size, starts after Y + U (offset = lumaSize * 5/4).
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, _textureV);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW / 2, _videoH / 2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes + lumaSize + lumaSize / 4);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    [self render];
}
// Draws the three previously-uploaded YUV plane textures as a full-screen
// quad through the YUV->RGB shader program and presents the renderbuffer.
- (void)render
{
    [EAGLContext setCurrentContext:_glContext];

    // NOTE(review): hard-coded viewport; this should use the renderbuffer's
    // backing width/height — confirm against the layer size before shipping.
    glViewport(0, 0, 320, 320);
    glClearColor(0.0, 0.6, 0.0, 1.0);

    // Full-screen quad as a triangle strip:
    // bottom-left, bottom-right, top-left, top-right.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    // Texture coordinates flipped vertically so the first (top) image row
    // maps to the top of the quad.
    static const GLfloat coordVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(_program);

    // glGetUniformLocation returns a signed GLint (-1 when the name is not
    // an active uniform); storing it in GLuint would silently hide failure.
    // These could also be cached once after program link instead of being
    // queried every frame.
    GLint textureUniformY = glGetUniformLocation(_program, "SamplerY");
    GLint textureUniformU = glGetUniformLocation(_program, "SamplerU");
    GLint textureUniformV = glGetUniformLocation(_program, "SamplerV");

    // Vertex attributes fed from client-side arrays (no VBO).
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);

    // Bind each plane texture to its unit and point the sampler at it.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, _textureY);
    glUniform1i(textureUniformY, 0);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, _textureU);
    glUniform1i(textureUniformU, 1);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, _textureV);
    glUniform1i(textureUniformV, 2);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [_glContext presentRenderbuffer:GL_RENDERBUFFER];
}
//Shader.vsh
// Pass-through vertex shader: forwards clip-space positions and texture
// coordinates unchanged to the fragment stage.
attribute vec4 position; // quad corner, already in clip space
//uniform float translate;
attribute vec2 TexCoordIn; // per-vertex texture coordinate
varying vec2 TexCoordOut; // interpolated coordinate for the fragment shader
void main(void)
{
gl_Position = position; // no transform needed for a full-screen quad
TexCoordOut = TexCoordIn;
}
//Shader.fsh
// Samples the three YUV420p plane textures and converts to RGB.
varying lowp vec2 TexCoordOut;
uniform sampler2D SamplerY; // full-resolution luma plane
uniform sampler2D SamplerU; // half-resolution chroma plane
uniform sampler2D SamplerV; // half-resolution chroma plane
void main(void)
{
mediump vec3 yuv;
lowp vec3 rgb;
// Chroma samples are stored biased by +0.5; recenter them around 0.
yuv.x = texture2D(SamplerY, TexCoordOut).r;
yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;
yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;
// GLSL mat3 is COLUMN-major: the three triples below are the Y, U and V
// contribution columns, i.e. r = y + 1.13983*v, g = y - 0.39465*u - 0.58060*v,
// b = y + 2.03211*u — the full-range BT.601 YUV->RGB conversion.
rgb = mat3( 1, 1, 1,
0, -0.39465, 2.03211,
1.13983, -0.58060, 0) * yuv;
gl_FragColor = vec4(rgb, 1);
}
// data points to one frame of planar YUV420p pixels laid out as
// Y (w*h bytes), then U (w/2 * h/2), then V (w/2 * h/2).
- (void)playVideoData:(void *)data
{
    [EAGLContext setCurrentContext:_glContext];

    // Lazily create the three single-channel plane textures on first use.
    if (!_textureY)
    {
        glGenTextures(1, &_textureY);
        glGenTextures(1, &_textureU);
        glGenTextures(1, &_textureV);
    }

    // YUV planes are tightly packed one-byte rows. The default unpack
    // alignment of 4 would skew any plane whose row width is not a
    // multiple of 4 — notably the half-width chroma planes.
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    // Arithmetic on void* is a GNU extension; use an explicit byte pointer.
    const uint8_t *bytes = (const uint8_t *)data;
    const size_t lumaSize = (size_t)_videoW * (size_t)_videoH;

    // Y plane: full resolution, one byte per texel in the red channel.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, _textureY);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW, _videoH, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // U plane: quarter size, starts right after the luma plane.
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, _textureU);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW / 2, _videoH / 2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes + lumaSize);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // V plane: quarter size, starts after Y + U (offset = lumaSize * 5/4).
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, _textureV);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW / 2, _videoH / 2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, bytes + lumaSize + lumaSize / 4);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    [self render];
}
// Draws the three previously-uploaded YUV plane textures as a full-screen
// quad through the YUV->RGB shader program and presents the renderbuffer.
- (void)render
{
    [EAGLContext setCurrentContext:_glContext];

    // NOTE(review): hard-coded viewport; this should use the renderbuffer's
    // backing width/height — confirm against the layer size before shipping.
    glViewport(0, 0, 320, 320);
    glClearColor(0.0, 0.6, 0.0, 1.0);

    // Full-screen quad as a triangle strip:
    // bottom-left, bottom-right, top-left, top-right.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    // Texture coordinates flipped vertically so the first (top) image row
    // maps to the top of the quad.
    static const GLfloat coordVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(_program);

    // glGetUniformLocation returns a signed GLint (-1 when the name is not
    // an active uniform); storing it in GLuint would silently hide failure.
    // These could also be cached once after program link instead of being
    // queried every frame.
    GLint textureUniformY = glGetUniformLocation(_program, "SamplerY");
    GLint textureUniformU = glGetUniformLocation(_program, "SamplerU");
    GLint textureUniformV = glGetUniformLocation(_program, "SamplerV");

    // Vertex attributes fed from client-side arrays (no VBO).
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);

    // Bind each plane texture to its unit and point the sampler at it.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, _textureY);
    glUniform1i(textureUniformY, 0);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, _textureU);
    glUniform1i(textureUniformU, 1);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, _textureV);
    glUniform1i(textureUniformV, 2);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [_glContext presentRenderbuffer:GL_RENDERBUFFER];
}
//Shader.vsh
// Pass-through vertex shader: forwards clip-space positions and texture
// coordinates unchanged to the fragment stage.
attribute vec4 position; // quad corner, already in clip space
//uniform float translate;
attribute vec2 TexCoordIn; // per-vertex texture coordinate
varying vec2 TexCoordOut; // interpolated coordinate for the fragment shader
void main(void)
{
gl_Position = position; // no transform needed for a full-screen quad
TexCoordOut = TexCoordIn;
}
//Shader.fsh
// Samples the three YUV420p plane textures and converts to RGB.
varying lowp vec2 TexCoordOut;
uniform sampler2D SamplerY; // full-resolution luma plane
uniform sampler2D SamplerU; // half-resolution chroma plane
uniform sampler2D SamplerV; // half-resolution chroma plane
void main(void)
{
mediump vec3 yuv;
lowp vec3 rgb;
// Chroma samples are stored biased by +0.5; recenter them around 0.
yuv.x = texture2D(SamplerY, TexCoordOut).r;
yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;
yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;
// GLSL mat3 is COLUMN-major: the three triples below are the Y, U and V
// contribution columns, i.e. r = y + 1.13983*v, g = y - 0.39465*u - 0.58060*v,
// b = y + 2.03211*u — the full-range BT.601 YUV->RGB conversion.
rgb = mat3( 1, 1, 1,
0, -0.39465, 2.03211,
1.13983, -0.58060, 0) * yuv;
gl_FragColor = vec4(rgb, 1);
}
相关文章推荐
- 最简单的视音频播放示例6:OpenGL播放YUV420P(通过Texture,使用Shader)
- 利用Qt + OpenGL 渲染 YUV数据,播放视频 mac版
- OpenGL播放yuv视频
- yuv视频用opengl播放
- OpenGL播放yuv视频
- OpenGL播放yuv视频
- OpenGL播放yuv视频
- qt5_qml_Opengl_shader 第四弹----------------------纹理贴图(YUV视频循环播放)
- 最简单的视音频播放示例6:OpenGL播放YUV420P(通过Texture,使用Shader)
- 最简单的视音频播放示例6:OpenGL播放YUV420P(通过Texture,使用Shader)
- 视频学习笔记:Android OpenGL渲染YUV420P图像
- 高效率视频播放: GPU支持的YUV RGB 转化例子(2)
- android jni基于ffmpeg,opengles,egl的yuv视频播放功能
- vs2010MFC D3D播放YUV格式视频详细制作全过程
- Ogre 播放 YUV 视频文件
- 最简单的视音频播放示例6:OpenGL播放YUV420P(通过Texture,使用Shader)
- 用OpenGLES实现yuv420p视频播放界面
- 高效率视频播放: GPU支持的YUV RGB 转化例子(2)
- ios opengl 播放 yuv数据 标签: ios
- 【OpenGL】用OpenGL shader实现将YUV(YUV420,YV12)转RGB-(直接调用GPU实现,纯硬件方式,效率高) 这段时间一直在搞视频格式的转换问题,终于最近将一个图片的YUV