
I have some images that I want to "put inside bubbles." The bubbles float around the screen with the images trapped inside them.

The best approach seems to be to combine the inner image with a bubble image, distorting the inner image somehow so that it looks like it is reflected inside the bubble.

Does anyone know how to achieve this effect without using textures and meshes? Perhaps someone remembers an old project or has done something similar?

Here is an example of what I mean:

(example image)


2 Answers


You can do this using the GPUImageSphereRefractionFilter from my open source GPUImage framework:

(Sphere refraction example image)

I describe how this works in detail in this answer to a question about a similar effect on Android. Basically, I use a fragment shader to refract the light passing through an imaginary sphere, and then use the refracted coordinates to look up a texture containing the source image. The background is blurred with a simple Gaussian blur.

If you want the exact look of the image you showed, you may need to tweak this fragment shader to add some grazing-angle color to the sphere, but this should get you fairly close.
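For a still image, the filter drops into a standard GPUImage processing chain. The following is only a minimal sketch, assuming the GPUImage still-image API of that era (GPUImagePicture as the source and imageFromCurrentlyProcessedOutput to read the result back); the inputImage variable and the parameter values are placeholders:

 GPUImagePicture *sourcePicture = [[GPUImagePicture alloc] initWithImage:inputImage];
 GPUImageSphereRefractionFilter *sphereFilter = [[GPUImageSphereRefractionFilter alloc] init];

 // Placeholder parameter values; adjust center, radius, and refractiveIndex to taste
 sphereFilter.radius = 0.25;
 sphereFilter.refractiveIndex = 0.71;

 [sourcePicture addTarget:sphereFilter];
 [sourcePicture processImage];

 UIImage *refractedImage = [sphereFilter imageFromCurrentlyProcessedOutput];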

Just for fun, I decided to try to replicate the glass sphere shown above more closely. I added grazing-angle lighting and a specular lighting reflection on the sphere, and did not invert the refracted texture coordinates, which leads to the following result:

(Grazing-angle lit sphere image)

I used the following fragment shader for this newer version:

 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 uniform highp vec2 center;
 uniform highp float radius;
 uniform highp float aspectRatio;
 uniform highp float refractiveIndex;
// uniform vec3 lightPosition;
 const highp vec3 lightPosition = vec3(-0.5, 0.5, 1.0);
 const highp vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);

 void main()
 {
     // Correct the y coordinate for the image aspect ratio so the sphere region stays circular
     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
     highp float distanceFromCenter = distance(center, textureCoordinateToUse);
     lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);

     distanceFromCenter = distanceFromCenter / radius;

     // Reconstruct the depth of the matching point on the sphere's front surface and derive the surface normal
     highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
     highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));

     // Refract a view ray pointing straight into the screen; flipping .xy afterwards keeps the looked-up image upright
     highp vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
     refractedVector.xy = -refractedVector.xy;

     highp vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;

     // Grazing angle lighting
     highp float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));
     finalSphereColor += lightingIntensity;

     // Specular lighting
     lightingIntensity  = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);
     lightingIntensity  = pow(lightingIntensity, 15.0);
     finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;

     gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;
 }

This filter can be run using a GPUImageGlassSphereFilter.

Answered 2012-07-23T14:51:49.103

For the record, I ended up going with GPUImage as @BradLarson suggested, but I had to write a custom filter, shown below. The filter takes the "inner" image and a bubble texture and blends the two, while also performing the refraction calculation but without inverting the image coordinates. The effect:

(Resulting bubble image)

.h

@interface GPUImageBubbleFilter : GPUImageTwoInputFilter

@property (readwrite, nonatomic) CGFloat refractiveIndex;   // defaults to 0.5
@property (readwrite, nonatomic) CGFloat radius;            // defaults to 0.5

@end

.m

#import "GPUImageBubbleFilter.h"

NSString *const kGPUImageBubbleShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 uniform highp vec2 center;
 uniform highp float radius;
 uniform highp float aspectRatio;
 uniform highp float refractiveIndex;

 void main()
 {
     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
     highp float distanceFromCenter = distance(center, textureCoordinateToUse);
     lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);

     distanceFromCenter = distanceFromCenter / radius;

     highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
     highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));

     // Refract through the sphere surface without the coordinate flip used above, so the inner image is not inverted
     highp vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);

     lowp vec4 textureColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere; 
     lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2) * checkForPresenceWithinSphere;

     // Composite the bubble texture over the refracted inner image, using the bubble's alpha as the blend factor
     gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
 }

 );


@interface GPUImageBubbleFilter () {
    GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;
}

@property (readwrite, nonatomic) CGFloat aspectRatio;

@end

@implementation GPUImageBubbleFilter
@synthesize radius = _radius, refractiveIndex = _refractiveIndex, aspectRatio = _aspectRatio;

- (id) init {
    self = [super initWithFragmentShaderFromString: kGPUImageBubbleShaderString];
    if( self ) {
        radiusUniform = [filterProgram uniformIndex: @"radius"];
        aspectRatioUniform = [filterProgram uniformIndex: @"aspectRatio"];
        centerUniform = [filterProgram uniformIndex: @"center"];
        refractiveIndexUniform = [filterProgram uniformIndex: @"refractiveIndex"];

        self.radius = 0.5;
        self.refractiveIndex = 0.5;
        self.aspectRatio = 1.0;

        GLfloat center[2] = {0.5, 0.5};
        [GPUImageOpenGLESContext useImageProcessingContext];
        [filterProgram use];
        glUniform2fv(centerUniform, 1, center);

        // Use a transparent background so the area outside the bubble stays clear
        [self setBackgroundColorRed: 0 green: 0 blue: 0 alpha: 0];
    }

    return self;
}

#pragma mark - Accessors
- (void) setRadius:(CGFloat)radius {
    _radius = radius;

    [GPUImageOpenGLESContext useImageProcessingContext];
    [filterProgram use];
    glUniform1f(radiusUniform, _radius);
}

- (void) setAspectRatio:(CGFloat)aspectRatio {
    _aspectRatio = aspectRatio;

    [GPUImageOpenGLESContext useImageProcessingContext];
    [filterProgram use];
    glUniform1f(aspectRatioUniform, _aspectRatio);
}

- (void)setRefractiveIndex:(CGFloat)newValue;
{
    _refractiveIndex = newValue;

    [GPUImageOpenGLESContext useImageProcessingContext];
    [filterProgram use];
    glUniform1f(refractiveIndexUniform, _refractiveIndex);
}

@end
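For completeness, here is a rough sketch of how this two-input filter might be driven, assuming the usual GPUImagePicture sources; the order of the addTarget: calls determines which image becomes inputImageTexture versus inputImageTexture2, and the image names are placeholders:

 UIImage *innerImage = [UIImage imageNamed:@"inner.png"];     // image trapped in the bubble (placeholder)
 UIImage *bubbleTexture = [UIImage imageNamed:@"bubble.png"]; // bubble overlay with alpha (placeholder)

 GPUImagePicture *innerPicture = [[GPUImagePicture alloc] initWithImage:innerImage];
 GPUImagePicture *bubblePicture = [[GPUImagePicture alloc] initWithImage:bubbleTexture];
 GPUImageBubbleFilter *bubbleFilter = [[GPUImageBubbleFilter alloc] init];

 // The first target added becomes inputImageTexture (the refracted inner image),
 // the second becomes inputImageTexture2 (the bubble overlay)
 [innerPicture addTarget:bubbleFilter];
 [bubblePicture addTarget:bubbleFilter];

 [innerPicture processImage];
 [bubblePicture processImage];

 UIImage *result = [bubbleFilter imageFromCurrentlyProcessedOutput];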
Answered 2012-07-24T09:00:00.927