我找到了一对用 ARB 编写的着色器。我需要帮助转换 GLSL (1.2) 中的着色器。我想扩展一个使用这些 ARB 着色器的项目,但是学习 GLSL 我认为使用 GLSL 着色器会更容易,这就是为什么我需要一个转换。我试图转换它们,但我知道我做错了什么。谁能帮我深入解释那些 ARB 着色器并提供转换?
--- 原始 ARB v1.0 着色器 ----
顶点着色器
!!ARBvp1.0
OPTION ARB_position_invariant;
TEMP r0;
# get eye to vertex
ADD r0, vertex.position, -program.local[0];
# stuff for scaling the detail normal tex by distance
DP3 r0.w, r0, r0;
RCP r0.w, r0.w;
MUL r0.w, r0.w, 250000;
MIN r0.w, r0.w, 1;
# eye to vertex
MOV result.texcoord[1], r0;
# normal tex coord
MOV result.texcoord[0], vertex.texcoord[0];
# detail normal tex coord
MUL result.texcoord[3], vertex.texcoord[0], 5;
# not used currently, surface normal
#MOV result.texcoord[2].x, -r0.y;
END
#
片段着色器
!!ARBfp1.0
PARAM p0 = {-7, .5, 32, .2};
PARAM p1 = {0, 1, 0, .6};
TEMP r0;
TEMP r1;
TEMP r2;
TEMP r3;
TEMP r4;
# normal tex
TEX r0, fragment.texcoord[0], texture[0], 2D;
# detail normal tex
TEX r1, fragment.texcoord[3], texture[0], 2D;
#scale & bias to [-1, 1]
MAD r0, r0, 2, -1;
MAD r1, r1, 2, -1;
# scale detail normal tex by some factor based on eye distance
# maybe replace this with smoothed out mipmap levels
MUL r1, r1, fragment.texcoord[1].w;
# instead of per vertex normals it is all done in textures
#ADD r0, r0, fragment.texcoord[2];
ADD r0, r0, r1;
DP3 r0.w, r0, r0;
RSQ r0.w, r0.w;
MUL r0, r0, r0.w;
# normalize the eye to vertex vector
DP3 r2.w, fragment.texcoord[1], fragment.texcoord[1];
RSQ r2.w, r2.w;
MUL r2, fragment.texcoord[1], r2.w;
# bias towards an up vector when on edge/far away
POW r3.w, -r2.y, p1.w;
MOV r4, r0;
LRP r0, r3.w, r0, p1;
# specular needs to be biased less, or not at all
LRP r4, r3.w, r0, r4;
# vertex to eye * normal, clamped
DP3_SAT r2.w, -r2, r0;
# get the half angle vector and normalize
ADD r3, -r2, program.local[3];
DP3 r3.w, r3, r3;
RSQ r3.w, r3.w;
MUL r3, r3, r3.w;
# half angle * normal
DP3_SAT r0.w, r3, r4;
# specular exponent
POW r0.w, r0.w, p0.z;
# fresnel = (1 + eye-normal dot product) ^ -7 (an approximation)
# could probably approximate with 1 - dot(vertex to eye, normal)
ADD r3.w, 1, r2.w;
POW r3.w, r3.w, p0.x;
# prepare the reflection texture coords (uses n.x and n.z as an offset)
MOV r0.y, r0.z;
# scale and bias screen position to get reflection texcoords - could do this in vertex program?
MAD r1, fragment.position, program.local[0], program.local[1];
# offset coords by normal, scaled down by distance (extra 10x factor stored in program.local[0].z)
RCP r1.z, r1.z;
MAD r1, r0, r1.z, r1;
# offset reflection lookup
TEX r1, r1, texture[1], 2D;
# specular
MUL r3.xyz, r0.w, program.local[4];
# reflection * fresnel
MAD r1, r1, r3.w, r3;
# add water color * (1 - fresnel)
ADD r3.w, 1, -r3.w;
MAD result.color, program.local[2], r3.w, r1;
END
#
我尝试转换它们:
///////// 顶点着色器 //////////
//#version 120
//OPTION ARB_position_invariant;
//var locala vertex shader
uniform vec4 cameraPos; //program.local[0]
//var vertex + fragment shader
varying vec4 waterTex0; //
varying vec4 waterTex1; //eyepos
varying vec4 waterTex2; //
varying vec4 waterTex3; //
void main(void)
{
//TEMP r0;
vec4 eyePos;
// get eye to vertex
//ADD r0, vertex.position, -program.local[0];
eyePos = gl_Vertex - cameraPos;
// stuff for scaling the detail normal tex by distance
//DP3 r0.w, r0, r0;
//RCP r0.w, r0.w;
//MUL r0.w, r0.w, 250000;
//MIN r0.w, r0.w, 1;
eyePos.w = dot(eyePos, eyePos);
eyePos.w = min(1.0 / eyePos.w * 250000, 1.0);
// eye to vertex
//MOV result.texcoord[1], r0;
waterTex1 = eyePos;
// normal tex coord
//MOV result.texcoord[0], vertex.texcoord[0];
waterTex0 = gl_MultiTexCoord0;
// detail normal tex coord
//MUL result.texcoord[3], vertex.texcoord[0], 5;
waterTex3 = gl_MultiTexCoord0 * 5;
// not used currently, surface normal
//MOV result.texcoord[2].x, -r0.y;
//END
//transformam pozitia vertexilor
//gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
gl_Position = ftransform();
}
///////////// 片段着色器 /////////////
//#version 120
//var locale fragment shader
//TEMP r0;
//TEMP r1;
//TEMP r2;
//TEMP r3;
//TEMP r4;
uniform vec4 texScale1; //program.local[0]
uniform vec4 texScale2; //program.local[1]
uniform vec4 waterColor; //program.local[2]
uniform vec4 lightDir; //program.local[3]
uniform vec4 specularColor; //program.local[4]
uniform sampler2D normalTex;
uniform sampler2D reflTex;
//var vertex + fragment shader
varying vec4 waterTex0; //
varying vec4 waterTex1; //eyepos
varying vec4 waterTex2; //
varying vec4 waterTex3; //
void main(void)
{
//PARAM p0 = {-7, 0.5, 32, 0.2};
const vec4 p0 = vec4(-7.0, 0.5, 32.0, 0.2);
//PARAM p1 = {0, 1, 0, .6};
const vec4 p1 = vec4(0.0, 1.0, 0.0, 0.6);
// normal tex
//TEX r0, fragment.texcoord[0], texture[0], 2D;
vec4 vNormT = texture2D(normalTex, vec2(waterTex0));
// detail normal tex
//TEX r1, fragment.texcoord[3], texture[0], 2D;
vec vDetailNormT = texture2D(normalTex, vec(waterTex3));
//scale & bias to [-1, 1]
//MAD r0, r0, 2, -1;
vNormT *= 2;
vNormT = clamp(vNormT, -1, 1);
//MAD r1, r1, 2, -1;
vDetailNormT *= 2;
vDetailNormT = clamp(vNormT, -1.0, 1.0); //clamp la [-1, 1]
// scale detail normal tex by some factor based on eye distance
// maybe replace this with smoothed out mipmap levels
//MUL r1, r1, fragment.texcoord[1].w;
vDetailNormT *= waterTex1.w;
/// instead of per vertex normals it is all done in textures
/// ADD r0, r0, fragment.texcoord[2];
//ADD r0, r0, r1;
//DP3 r0.w, r0, r0;
//RSQ r0.w, r0.w;
//MUL r0, r0, r0.w;
vNormT += vDetailNormT;
vNormT = normalize(vNormT);
// normalize the eye to vertex vector
//DP3 r2.w, fragment.texcoord[1], fragment.texcoord[1];
//RSQ r2.w, r2.w;
//MUL r2, fragment.texcoord[1], r2.w;
vec4 vEyePosN = normalize(waterTex1);
// bias towards an up vector when on edge/far away
//POW r3.w, -r2.y, p1.w;
//MOV r4, r0;
//LRP r0, r3.w, r0, p1;
vec4 vHalfAngle = vec4(0);
vHalfAngle.w = pow(-vEyePosN.y, p1.w); //in loc de r3.w
vec4 vNormTtt = vNormT;
//LRP R0, R0, R1, R2: is R0*R1+(1-R0)*R2
//vNormT = vec4(vNormT * vHalfAngle.w) + vec4(p1 * (1.0 - vHalfAngle.w));
vNormT = mix(vNormT, p1, vHalfAngle.w); //functie interpoalre liniara
// specular needs to be biased less, or not at all
//LRP r4, r3.w, r0, r4;
//vNormTtt = vec4(vNormT * vHalfAngle.w) + vec4(vNormTtt * (1.0 - vHalfAngle.w));
vNormTtt = mix(vNormT, vNormTtt, vHalfAngle.w);
// vertex to eye * normal, clamped
//DP3_SAT r2.w, -r2, r0;
// ???
vEyePosN.w = clamp(dot(-vEyePosN, vNormT), 0.0, 1.0); //DP3_SAT <=> dot() si clamp()
// get the half angle vector and normalize
//ADD r3, -r2, program.local[3];
//DP3 r3.w, r3, r3;
//RSQ r3.w, r3.w;
//MUL r3, r3, r3.w;
vHalfAngle = lightDir - vEyePosN;
vHalfAngle = normalize(vHalfAngle);
// half angle * normal
//DP3_SAT r0.w, r3, r4;
vNormT.w = clamp(dot(vHalfAngle, vNormTtt), 0.0, 1.0);
// specular exponent
//POW r0.w, r0.w, p0.z;
vNormT.w = pow(vNormT, p0.z);
// fresnel = (1 + eye-normal dot product) ^ -7 (an approximation)
// could probably approximate with 1 - dot(vertex to eye, normal)
//ADD r3.w, 1, r2.w;
//POW r3.w, r3.w, p0.x;
vHalfAngle.w = vEyePosN.w + 1.0;
vHalfAngle.w = pow(vHalfAngle.w, p0.x);
// prepare the reflection texture coords (uses n.x and n.z as an offset)
//MOV r0.y, r0.z;
vNormT.y = vNormT.z;
// scale and bias screen position to get reflection texcoords - could do this in vertex program?
//MAD r1, fragment.position, program.local[0], program.local[1];
//trebuie sa mut acesata linie in vertex shader, sa vad cum fac ...?
vDetailNormT = gl_Position * texScale1;
vDetailNormT += texScale2;
// offset coords by normal, scaled down by distance (extra 10x factor stored in program.local[0].z)
//RCP r1.z, r1.z;
//MAD r1, r0, r1.z, r1;
vDetailNormT.z = 1 / vDetailNormT.z;
vNormT *= vDetailNormT.z;
vDetailNormT += vNormT;
// offset reflection lookup
//TEX r1, r1, texture[1], 2D;
vDetailNormT = texture2D(reflTex, vDetailNormT);
// specular
//MUL r3.xyz, r0.w, program.local[4];
vHalfAngle.xyz = specularColor * vNormT.w;
// reflection * fresnel
//MAD r1, r1, r3.w, r3;
vDetailNormT *= vHalfAngle.w;
vDetailNormT += vHalfAngle;
// add water color * (1 - fresnel)
//ADD r3.w, 1, -r3.w;
//MAD result.color, program.local[2], r3.w, r1;
vHalfAngle.w = 1.0 - vHalfAngle.w;
waterColor *= vHalfAngle.w;
gl_FragColor = waterColor + vDetailNormT;
//END
}