模仿Flickr 使用GPUImage的实时相机滤镜(iOS源代码)
2016-08-28 16:17
639 查看
from: http://code1.okbase.net/codefile/GPUImagePoissonBlendFilter.m_2016011433675_220.htm
#import "GPUImagePoissonBlendFilter.h"

// Fragment shader for one relaxation step of a Poisson (gradient-domain) blend.
// For each fragment it takes the 4-neighbor mean of the base texture and adds
// the gradient (center - 4-neighbor mean) of the second texture, then mixes
// that result with the original color, weighted by centerColor2.a * mixturePercent.
// The filter re-runs this shader numIterations times (see the implementation),
// ping-ponging between framebuffers so the solution propagates outward.
// NOTE(review): diffColor is computed but never read; it is kept so the shader
// source stays identical to the original.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES build: ES fragment shaders require an explicit default float precision.
NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
(
 precision mediump float;

 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 bottomTextureCoordinate;

 varying vec2 textureCoordinate2;
 varying vec2 leftTextureCoordinate2;
 varying vec2 rightTextureCoordinate2;
 varying vec2 topTextureCoordinate2;
 varying vec2 bottomTextureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 uniform lowp float mixturePercent;

 void main()
 {
     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;

     vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
     vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
     vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
     vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;

     vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
     vec3 diffColor = centerColor.rgb - meanColor;

     vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
     vec3 diffColor2 = centerColor2.rgb - meanColor2;

     vec3 gradColor = (meanColor + diffColor2);

     gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
 }
);
#else
// Desktop OpenGL build: precision qualifiers are not used.
NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 bottomTextureCoordinate;

 varying vec2 textureCoordinate2;
 varying vec2 leftTextureCoordinate2;
 varying vec2 rightTextureCoordinate2;
 varying vec2 topTextureCoordinate2;
 varying vec2 bottomTextureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 uniform float mixturePercent;

 void main()
 {
     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;

     vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
     vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
     vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
     vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;

     vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
     vec3 diffColor = centerColor.rgb - meanColor;

     vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
     vec3 diffColor2 = centerColor2.rgb - meanColor2;

     vec3 gradColor = (meanColor + diffColor2);

     gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
 }
);
#endif
@implementation GPUImagePoissonBlendFilter

@synthesize mix = _mix;
@synthesize numIterations = _numIterations;

#pragma mark - Initialization

/// Compiles the Poisson blend fragment shader, caches the "mixturePercent"
/// uniform index, and seeds the defaults (mix = 0.5, numIterations = 10).
- (id)init
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePoissonBlendFragmentShaderString]))
    {
        return nil;
    }

    mixUniform = [filterProgram uniformIndex:@"mixturePercent"];

    self.mix = 0.5;
    self.numIterations = 10;

    return self;
}

#pragma mark - Accessors

/// Stores the blend strength and forwards it to the shader uniform.
- (void)setMix:(CGFloat)newValue
{
    _mix = newValue;
    [self setFloat:_mix forUniform:mixUniform program:filterProgram];
}

//- (void)setOutputFBO;
//{
//    if (self.numIterations % 2 == 1) {
//        [self setSecondFilterFBO];
//    } else {
//        [self setFilterFBO];
//    }
//}

#pragma mark - Rendering

/// Runs the relaxation shader numIterations times, alternating between the
/// superclass render path and a second framebuffer so each pass reads the
/// previous pass's output.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates
{
    // Run the first stage of the two-pass filter
    [GPUImageContext setActiveShaderProgram:filterProgram];
    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];

    for (int iteration = 1; iteration < self.numIterations; iteration++)
    {
        if (iteration % 2 != 0)
        {
            // Run the second stage of the two-pass filter: draw into a second
            // framebuffer, feeding the previous output back as the base texture.
            secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
            [secondOutputFramebuffer activateFramebuffer];

            [GPUImageContext setActiveShaderProgram:filterProgram];

            glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
            glClear(GL_COLOR_BUFFER_BIT);

            // Bind the previous pass output as texture unit 2 and the second
            // input as unit 3, matching the two sampler uniforms.
            glActiveTexture(GL_TEXTURE2);
            glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
            glUniform1i(filterInputTextureUniform, 2);

            glActiveTexture(GL_TEXTURE3);
            glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
            glUniform1i(filterInputTextureUniform2, 3);

            glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
            glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
            glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);

            glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
        }
        else
        {
            // Even pass: reuse the superclass render path with unrotated coordinates.
            [GPUImageContext setActiveShaderProgram:filterProgram];
            // TODO: This will over-unlock the incoming framebuffer
            [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];
        }
    }
}

@end
// Copyright belongs to the original author.
// http://code4app.com (cn)  http://code4app.net (en)
// 发布代码于最专业的源码分享网站: Code4App.com
#import "GPUImagePoissonBlendFilter.h"

// Fragment shader for one relaxation step of a Poisson (gradient-domain) blend.
// For each fragment it takes the 4-neighbor mean of the base texture and adds
// the gradient (center - 4-neighbor mean) of the second texture, then mixes
// that result with the original color, weighted by centerColor2.a * mixturePercent.
// The filter re-runs this shader numIterations times (see the implementation),
// ping-ponging between framebuffers so the solution propagates outward.
// NOTE(review): diffColor is computed but never read; it is kept so the shader
// source stays identical to the original.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES build: ES fragment shaders require an explicit default float precision.
NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
(
 precision mediump float;

 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 bottomTextureCoordinate;

 varying vec2 textureCoordinate2;
 varying vec2 leftTextureCoordinate2;
 varying vec2 rightTextureCoordinate2;
 varying vec2 topTextureCoordinate2;
 varying vec2 bottomTextureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 uniform lowp float mixturePercent;

 void main()
 {
     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;

     vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
     vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
     vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
     vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;

     vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
     vec3 diffColor = centerColor.rgb - meanColor;

     vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
     vec3 diffColor2 = centerColor2.rgb - meanColor2;

     vec3 gradColor = (meanColor + diffColor2);

     gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
 }
);
#else
// Desktop OpenGL build: precision qualifiers are not used.
NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 bottomTextureCoordinate;

 varying vec2 textureCoordinate2;
 varying vec2 leftTextureCoordinate2;
 varying vec2 rightTextureCoordinate2;
 varying vec2 topTextureCoordinate2;
 varying vec2 bottomTextureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 uniform float mixturePercent;

 void main()
 {
     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;

     vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
     vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
     vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
     vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
     vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;

     vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
     vec3 diffColor = centerColor.rgb - meanColor;

     vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
     vec3 diffColor2 = centerColor2.rgb - meanColor2;

     vec3 gradColor = (meanColor + diffColor2);

     gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
 }
);
#endif
@implementation GPUImagePoissonBlendFilter

@synthesize mix = _mix;
@synthesize numIterations = _numIterations;

#pragma mark - Initialization

/// Compiles the Poisson blend fragment shader, caches the "mixturePercent"
/// uniform index, and seeds the defaults (mix = 0.5, numIterations = 10).
- (id)init
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePoissonBlendFragmentShaderString]))
    {
        return nil;
    }

    mixUniform = [filterProgram uniformIndex:@"mixturePercent"];

    self.mix = 0.5;
    self.numIterations = 10;

    return self;
}

#pragma mark - Accessors

/// Stores the blend strength and forwards it to the shader uniform.
- (void)setMix:(CGFloat)newValue
{
    _mix = newValue;
    [self setFloat:_mix forUniform:mixUniform program:filterProgram];
}

//- (void)setOutputFBO;
//{
//    if (self.numIterations % 2 == 1) {
//        [self setSecondFilterFBO];
//    } else {
//        [self setFilterFBO];
//    }
//}

#pragma mark - Rendering

/// Runs the relaxation shader numIterations times, alternating between the
/// superclass render path and a second framebuffer so each pass reads the
/// previous pass's output.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates
{
    // Run the first stage of the two-pass filter
    [GPUImageContext setActiveShaderProgram:filterProgram];
    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];

    for (int iteration = 1; iteration < self.numIterations; iteration++)
    {
        if (iteration % 2 != 0)
        {
            // Run the second stage of the two-pass filter: draw into a second
            // framebuffer, feeding the previous output back as the base texture.
            secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
            [secondOutputFramebuffer activateFramebuffer];

            [GPUImageContext setActiveShaderProgram:filterProgram];

            glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
            glClear(GL_COLOR_BUFFER_BIT);

            // Bind the previous pass output as texture unit 2 and the second
            // input as unit 3, matching the two sampler uniforms.
            glActiveTexture(GL_TEXTURE2);
            glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
            glUniform1i(filterInputTextureUniform, 2);

            glActiveTexture(GL_TEXTURE3);
            glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
            glUniform1i(filterInputTextureUniform2, 3);

            glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
            glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
            glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);

            glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
        }
        else
        {
            // Even pass: reuse the superclass render path with unrotated coordinates.
            [GPUImageContext setActiveShaderProgram:filterProgram];
            // TODO: This will over-unlock the incoming framebuffer
            [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];
        }
    }
}

@end
// Copyright belongs to the original author.
// http://code4app.com (cn)  http://code4app.net (en)
// 发布代码于最专业的源码分享网站: Code4App.com
相关文章推荐
- iOS使用GPUImage实现实时反色相机
- iOS—使用GPUImage对照片和相机添加滤镜
- 使用GPUImage 实现利用GPU来实现滤镜(实时滤镜 和iphone手机一样)
- iOS --- 使用GPUImage实现的简单滤镜效果
- iOS 使用GPUImage为本地视频添加滤镜
- iOS实时滤镜实现--基于GPUImage。
- iOS 使用GPUImage录制实时视频第一帧黑屏的解决办法
- iOS基于AVFoundation + GPUImage的实时图像处理
- GPUImage简单滤镜使用之色阶(三)
- iOS GPUImage 添加到工程 使用GPUImage渲染图片教程
- GPUimage 简单使用(-)单个滤镜使用
- iOS GPUImage 的使用
- 基于GPUImage的实时美颜滤镜
- iOS开发之滤镜的使用技巧(CoreImage)
- ios GPUImage简单滤镜 -- 录制视频(保存+聚焦)
- 使用iOS原生框架实现实时滤镜效果
- 使用GPUImage开启的相机进行摄像,保存写入到Path
- 用GPUImage开启相机并且开启滤镜效果
- GPUImage教程(一)安装,使用例子, 原理和内置滤镜列表
- iOS第三方做滤镜最主流的开源框架GPUImage