项目源码:https://github.com/smzhldr/AGLFramework
一、前言
前面的章节中的内容差不多能让初学者对OpenGL 有个初步的理解,基础知识很重要但基础不是核心,我们该有新的目标了,基础部分忘了的可以复习下前几章的内容,想拓宽基础的可以参考文档或者看巨著。
这一节我们从OpenGL的实际应用出发,逐步来掌握OpenGL各种各样的功能,之后的的章节将以应用为背景,以进阶为目的。我们选取类似于美颜相机中的美白和磨皮两个比较典型的功能,来抛砖引玉,进一步提高。
二、效果一览
子怡姐姐得罪了,阿弥陀佛,不过美女怎么看都很美,随便调了个程度,看下效果对比还是挺明显的

三、美白浅析
在OpenGL中一个像素可以拆分成RGBA格式来处理,RGBA分别为,红、绿,蓝,透明度通道,大部分的特效处理都是按一定的规则改变不同位置像素的RGBA的值,取值为0.0 - 1.0,我们只是演示美白功能,为了简单,同时改变每个像素的RGB的值,若果RGB都为0则是黑色,RGB全为1则是白色,所以我们对每个像素做如下操作:
vec4 color = texture2D(inputImageTexture,textureCoordinate);
...
//其中0.25是取得经验值,可以随便改多试试,whiteLevel可动态调节美白程度
color.r= color.r + 0.25 * whiteLevel;
color.g= color.g + 0.25 * whiteLevel;
color.b= color.b + 0.25 * whiteLevel;
gl_FragColor = color;
完整的Filter长相如下:
public class WhiteFilter extends AGLFilter {
private static final String WIHITE_FRAGMENT_SHADER =
"precision mediump float;\n" +
"uniform sampler2D inputImageTexture;\n" +
"uniform float level;\n" +
"varying vec2 textureCoordinate;\n" +
"\n " +
"void modifyColor(vec4 color){\n" +
"color.r=max(min(color.r,1.0),0.0);\n" +
"color.g=max(min(color.g,1.0),0.0);\n" +
"color.b=max(min(color.b,1.0),0.0);\n" +
"color.a=max(min(color.a,1.0),0.0);\n" +
" }\n" +
"\n " +
"void main() {\n" +
"vec4 nColor = texture2D(inputImageTexture,textureCoordinate);\n" +
"vec4 deltaColor = nColor+vec4(vec3(level * 0.25),0.0);\n" +
"modifyColor(deltaColor);\n" +
"gl_FragColor = deltaColor;\n" +
"}";
private int glUniformLevel;
private float level;
@Override
protected void onInit() {
programId = OpenGlUtils.loadProgram(NO_FILTER_VERTEX_SHADER, WIHITE_FRAGMENT_SHADER);
glAttrPosition = GLES20.glGetAttribLocation(programId, "position");
glAttrTextureCoordinate = GLES20.glGetAttribLocation(programId, "inputTextureCoordinate");
glUniformTexture = GLES20.glGetUniformLocation(programId, "inputImageTexture");
glUniformLevel = GLES20.glGetUniformLocation(programId, "level");
}
@Override
protected void onDrawArraysPre(Frame frame) {
GLES20.glUniform1f(glUniformLevel, level);
}
public void setWhiteLevel(float level) {
this.level = level;
}
}
具体使用可以看源码:https://github.com/smzhldr/AGLFramework
四、磨皮浅析
磨皮相对美白稍微要复杂一些,人物的正常肤,色应该是偏红,所以则会导致人物图像的红色通道偏亮,使红色通道保留的细节成分较少,相比之下,绿色通道保留更多细节,所以,一般情况下我们也可以只采用过滤绿色通道来实现实时磨皮。
1.取出绿色通道,对绿色通道进行模糊处理,例如高斯模糊,得到模糊后的值sampleColor
2.用原图绿色通道值减去sampleColor,加上0.5(即128),1+2两个步骤即PS中的高反差保留
3.对上述结果值进行3-5次强光处理,此步骤可以使得噪声更加突出
4.计算原图的灰度值,公式为0.299R + 0.587G + 0.114*B
5.将灰度值作为阈值,用来排除非皮肤部分,根据灰度值计算,将原图与1-3后的结果图合成
6.对混合后结果增加亮度
7.以灰度值作为透明度将原图与混合后结果进行滤色、柔光等混合,并调节饱和度
在有了美白的基础之后是不是觉得磨皮像在做数学题一样,写出公式即可,虽然步骤多一点,但是却有迹可循,上面提到的模糊一般的做法是在一个像素周围取一圈像素,然后按一定的权重取均值,混合利用OpenGL内置函数mix()即可,柔光等光线调节则与前面的美白异曲同工。
完整的Filter长这样:
public class SmoothFilter extends AGLFilter {
private static final String SMOOTH_FRAGMENT_SHADER = "precision mediump float;\n" +
"\n" +
"varying mediump vec2 textureCoordinate;\n" +
"\n" +
"uniform sampler2D inputImageTexture;\n" +
"uniform vec2 singleStepOffset;\n" +
"uniform mediump float params;\n" +
"\n" +
"const highp vec3 W = vec3(0.299,0.587,0.114);\n" +
"vec2 blurCoordinates[20];\n" +
"\n" +
"float hardLight(float color)\n" +
"{\n" +
" if(color <= 0.5)\n" +
" color = color * color * 2.0;\n" +
" else\n" +
" color = 1.0 - ((1.0 - color)*(1.0 - color) * 2.0);\n" +
" return color;\n" +
"}\n" +
"\n" +
"void main(){\n" +
"\n" +
" vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n" +
" blurCoordinates[0] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -10.0);\n" +
" blurCoordinates[1] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 10.0);\n" +
" blurCoordinates[2] = textureCoordinate.xy + singleStepOffset * vec2(-10.0, 0.0);\n" +
" blurCoordinates[3] = textureCoordinate.xy + singleStepOffset * vec2(10.0, 0.0);\n" +
" blurCoordinates[4] = textureCoordinate.xy + singleStepOffset * vec2(5.0, -8.0);\n" +
" blurCoordinates[5] = textureCoordinate.xy + singleStepOffset * vec2(5.0, 8.0);\n" +
" blurCoordinates[6] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, 8.0);\n" +
" blurCoordinates[7] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, -8.0);\n" +
" blurCoordinates[8] = textureCoordinate.xy + singleStepOffset * vec2(8.0, -5.0);\n" +
" blurCoordinates[9] = textureCoordinate.xy + singleStepOffset * vec2(8.0, 5.0);\n" +
" blurCoordinates[10] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, 5.0);\n" +
" blurCoordinates[11] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, -5.0);\n" +
" blurCoordinates[12] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -6.0);\n" +
" blurCoordinates[13] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 6.0);\n" +
" blurCoordinates[14] = textureCoordinate.xy + singleStepOffset * vec2(6.0, 0.0);\n" +
" blurCoordinates[15] = textureCoordinate.xy + singleStepOffset * vec2(-6.0, 0.0);\n" +
" blurCoordinates[16] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, -4.0);\n" +
" blurCoordinates[17] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, 4.0);\n" +
" blurCoordinates[18] = textureCoordinate.xy + singleStepOffset * vec2(4.0, -4.0);\n" +
" blurCoordinates[19] = textureCoordinate.xy + singleStepOffset * vec2(4.0, 4.0);\n" +
"\n" +
" float sampleColor = centralColor.g * 20.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[0]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[1]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[2]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[3]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[4]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[5]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[6]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[7]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[8]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[9]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[10]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[11]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[12]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[13]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[14]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[15]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[16]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[17]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[18]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[19]).g * 2.0;\n" +
"\n" +
" sampleColor = sampleColor / 48.0;\n" +
"\n" +
" float highPass = centralColor.g - sampleColor + 0.5;\n" +
"\n" +
" for(int i = 0; i < 5;i++)\n" +
" {\n" +
" highPass = hardLight(highPass);\n" +
" }\n" +
" float luminance = dot(centralColor, W);\n" +
"\n" +
" float alpha = pow(luminance, params);\n" +
"\n" +
" vec3 smoothColor = centralColor + (centralColor-vec3(highPass))*alpha*0.1;\n" +
"\n" +
" gl_FragColor = vec4(mix(smoothColor.rgb, max(smoothColor, centralColor), alpha), 1.0);\n" +
"}";
private int glUniformsample;
private int glUniformLevel;
private float smoothLevel;
@Override
protected void onInit() {
programId = OpenGlUtils.loadProgram(NO_FILTER_VERTEX_SHADER, SMOOTH_FRAGMENT_SHADER);
glAttrPosition = GLES20.glGetAttribLocation(programId, "position");
glAttrTextureCoordinate = GLES20.glGetAttribLocation(programId, "inputTextureCoordinate");
glUniformTexture = GLES20.glGetUniformLocation(programId, "inputImageTexture");
glUniformLevel = GLES20.glGetUniformLocation(programId, "params");
glUniformsample = GLES20.glGetUniformLocation(programId, "singleStepOffset");
}
@Override
protected void onDrawArraysPre(Frame frame) {
GLES20.glUniform1f(glUniformLevel, smoothLevel);
GLES20.glUniform2fv(glUniformsample, 1, new float[]{2.0f / frame.getTextureWidth(), 2.0f / frame.getTextureHeight()}, 0);
}
public void setSmoothLevel(float level) {
this.smoothLevel = level;
}
}
其中大部分都是shader代码,如果将shader放到文件中,filter的内容则相当简单,掌握了上面简易美白和磨皮,就可以尝试写更复杂的shader,随心所欲添加一些让人眼花缭乱的特效了,多尝试会有意想不到的效果。
具体使用可以参考
源码:https://github.com/smzhldr/AGLFramework