The vertex shader's main job is to transform vertex positions, taking each vertex through the local -> world -> view -> clip chain of spaces (a conceptual sketch follows the space definitions below).

local space: the model's own (object) coordinate system

world space: the world coordinate system

view space: the camera coordinate system

clip space: the clip (homogeneous) coordinate system
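
Conceptually, the whole chain can be written out as the sketch below. This is an illustration only: matrix_view and matrix_projection are assumed names spelled out for clarity, whereas the real shader later in this post uploads matrix_model together with the pre-multiplied matrix_viewProjection.

// Conceptual sketch of the local -> world -> view -> clip chain (GLSL).
// matrix_view / matrix_projection are illustrative uniforms, not the ones
// the PlayCanvas shader below actually declares.
uniform mat4 matrix_model;
uniform mat4 matrix_view;
uniform mat4 matrix_projection;
vec4 transformChain(vec3 localPos) {
    vec4 worldPos = matrix_model * vec4(localPos, 1.0); // local -> world
    vec4 viewPos  = matrix_view * worldPos;             // world -> view
    vec4 clipPos  = matrix_projection * viewPos;        // view  -> clip
    return clipPos;
}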

Simplified, the local -> world step is really just this single line of code:

vec4 posW = dModelMatrix * vec4(localPos, 1.0);

(The figure above was produced by writing a small script on https://tinygltf.xyz/drawgeometry/ and screenshotting the output.)

dModelMatrix is the matrix that takes the vertex from its local coordinate system into world space.

dModelMatrix is obtained from the getModelMatrix() function:

mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
    return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
    return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
                           getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y +
                           getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
                           getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
    return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
    return matrix_model;
#endif
}

As the code shows, there are several preprocessor branches, each producing the model matrix in a different way. Without skinning (the #else branch), the function simply returns matrix_model, the global model matrix passed in from the application.

With a skinned mesh (SKIN), it returns matrix_model * boneMatrix: the vertex's local position is first deformed by the bone (skinning) matrix and then moved into world space by matrix_model.

The boneMatrix itself is built by summing the bone matrices of the bones that influence the vertex, each weighted by the corresponding vertex bone weight.
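
Because matrix-vector multiplication is linear, blending the four bone matrices and then transforming gives the same result as transforming by each bone and then blending the positions. Below is a minimal sketch of that equivalent form, reusing the vertex_boneIndices / vertex_boneWeights attributes and getBoneMatrix() from the shader; it is an illustration, not an actual PlayCanvas chunk.

// Equivalent formulation: transform by each bone first, then blend the results.
vec3 skinPositionLocal(vec3 localPos) {
    vec4 p = vec4(localPos, 1.0);
    vec3 skinned =
        (getBoneMatrix(vertex_boneIndices.x) * p).xyz * vertex_boneWeights.x +
        (getBoneMatrix(vertex_boneIndices.y) * p).xyz * vertex_boneWeights.y +
        (getBoneMatrix(vertex_boneIndices.z) * p).xyz * vertex_boneWeights.z +
        (getBoneMatrix(vertex_boneIndices.w) * p).xyz * vertex_boneWeights.w;
    // Still in model space here; multiplying by matrix_model afterwards
    // moves the skinned position into world space.
    return skinned;
}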

Simplified, the world -> clip step is:

screenPos = matrix_viewProjection * posW;

This simply takes the world-space position computed above and transforms it into clip space with matrix_viewProjection. Although the variable is named screenPos, it is not a screen coordinate yet; it is an unnormalized clip-space position. After the vertex shader runs, the fixed-function hardware clips primitives and divides by w, bringing xyz into the [-1, 1] NDC range, before the fragment shader is invoked.
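
As a rough sketch of what happens to this value after the vertex shader: the divide and viewport mapping below are fixed-function steps performed by the GPU, you never write them in the shader, and viewportSize is just an assumed name for illustration.

// Illustration only: conceptually what the GPU does between the vertex and
// fragment stages. Not part of the PlayCanvas shader.
vec3 clipToWindow(vec4 clipPos, vec2 viewportSize) {
    vec3 ndc = clipPos.xyz / clipPos.w;                  // perspective divide -> [-1, 1]
    vec2 windowXY = (ndc.xy * 0.5 + 0.5) * viewportSize; // viewport transform
    float depth = ndc.z * 0.5 + 0.5;                     // default depth range [0, 1]
    return vec3(windowXY, depth);
}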

The vertex shader only needs to output the clip-space position, i.e. return the result of getPosition() through gl_Position:

gl_Position = getPosition();

The complete main() of the vertex shader is:

void main(void) {
    gl_Position = getPosition();
    vPositionW = getWorldPosition();
    vNormalW = dNormalW = getNormal();
    vec2 uv0 = getUv0();
    vUv0 = uv0;
    vVertexColor = vertex_color;
}

The remaining calls — getWorldPosition(), getNormal(), getUv0() — output the vertex's world-space position, world-space normal, and UV coordinates, which the fragment shader then uses for lighting and shading.
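
To make that hand-off concrete, here is a minimal fragment-shader sketch that consumes these varyings for a simple Lambert term. It is only an illustration: uLightPosW, uLightColor and uAlbedo are assumed uniform names and this is not the actual PlayCanvas PBR pixel shader.

#version 300 es
precision highp float;
// Minimal consumer of the varyings written by the vertex shader above.
// uLightPosW / uLightColor / uAlbedo are assumed names for illustration.
in vec3 vPositionW;
in vec3 vNormalW;
in vec2 vUv0;
in vec4 vVertexColor;
out vec4 fragColor;
uniform vec3 uLightPosW;   // point light position in world space
uniform vec3 uLightColor;
uniform sampler2D uAlbedo;
void main() {
    vec3 n = normalize(vNormalW);                // re-normalize after interpolation
    vec3 l = normalize(uLightPosW - vPositionW); // direction toward the light
    float ndl = max(dot(n, l), 0.0);             // Lambert diffuse term
    vec3 albedo = texture(uAlbedo, vUv0).rgb * vVertexColor.rgb;
    fragColor = vec4(albedo * uLightColor * ndl, 1.0);
}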

The complete vertex shader code is as follows:

#version 300 es
#define attribute in
#define varying out
#define texture2D texture
#define GL2
#define VERTEXSHADER
varying vec4 vVertexColor;
varying vec3 vPositionW;
varying vec3 vNormalW;
varying vec2 vUv0;
attribute vec3 vertex_position;
attribute vec3 vertex_normal;
attribute vec4 vertex_tangent;
attribute vec2 vertex_texCoord0;
attribute vec2 vertex_texCoord1;
attribute vec4 vertex_color;
uniform mat4 matrix_viewProjection;
uniform mat4 matrix_model;
uniform mat3 matrix_normal;
vec3 dPositionW;
mat4 dModelMatrix;
mat3 dNormalMatrix;
vec3 dLightPosW;
vec3 dLightDirNormW;
vec3 dNormalW;
#ifdef NINESLICED
vec2 getUv0() {
vec2 uv = vertex_position.xz;
// offset inner vertices inside
 
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
uv += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
uv = uv * -0.5 + 0.5;
uv = uv * atlasRect.zw + atlasRect.xy;
vMask = vertex_texCoord0.xy;
return uv;
}
#else
vec2 getUv0() {
return vertex_texCoord0;
}
#endif
attribute vec4 vertex_boneWeights;
attribute vec4 vertex_boneIndices;
uniform sampler2D texture_poseMap;
uniform vec2 texture_poseMapSize;
mat4 getBoneMatrix(const in float i) {
float j = i * 4.0;
float x = mod(j, float(texture_poseMapSize.x));
float y = floor(j / float(texture_poseMapSize.x));
float dx = 1.0 / float(texture_poseMapSize.x);
float dy = 1.0 / float(texture_poseMapSize.y);
y = dy * (y + 0.5);
vec4 v1 = texture2D(texture_poseMap, vec2(dx * (x + 0.5), y));
vec4 v2 = texture2D(texture_poseMap, vec2(dx * (x + 1.5), y));
vec4 v3 = texture2D(texture_poseMap, vec2(dx * (x + 2.5), y));
vec4 v4 = texture2D(texture_poseMap, vec2(dx * (x + 3.5), y));
mat4 bone = mat4(v1, v2, v3, v4);
return bone;
}
#define SKIN
#ifdef PIXELSNAP
uniform vec4 uScreenSize;
#endif
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y+
getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
return matrix_model;
#endif
}
vec4 getPosition() {
dModelMatrix = getModelMatrix();
vec3 localPos = vertex_position;
#ifdef NINESLICED
// outer and inner vertices are at the same position, scale both
localPos.xz *= outerScale;
// offset inner vertices inside
 
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
localPos.xz += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
vTiledUv = (localPos.xz - outerScale + innerOffset.xy) * -0.5 + 1.0; // uv = local pos - inner corner
 
localPos.xz *= -0.5; // move from -1;1 to -0.5;0.5
 
localPos = localPos.xzy;
#endif
vec4 posW = dModelMatrix * vec4(localPos, 1.0);
#ifdef SCREENSPACE
posW.zw = vec2(0.0, 1.0);
#endif
dPositionW = posW.xyz;
vec4 screenPos;
#ifdef UV1LAYOUT
screenPos = vec4(vertex_texCoord1.xy * 2.0 - 1.0, 0.5, 1.0);
#else
#ifdef SCREENSPACE
screenPos = posW;
#else
screenPos = matrix_viewProjection * posW;
#endif
#ifdef PIXELSNAP
// snap vertex to a pixel boundary
screenPos.xy = (screenPos.xy * 0.5) + 0.5;
screenPos.xy *= uScreenSize.xy;
screenPos.xy = floor(screenPos.xy);
screenPos.xy *= uScreenSize.zw;
screenPos.xy = (screenPos.xy * 2.0) - 1.0;
#endif
#endif
return screenPos;
}
vec3 getWorldPosition() {
return dPositionW;
}
vec3 getNormal() {
#ifdef SKIN
dNormalMatrix = mat3(dModelMatrix[0].xyz, dModelMatrix[1].xyz, dModelMatrix[2].xyz);
#elif defined(INSTANCING)
dNormalMatrix = mat3(instance_line1.xyz, instance_line2.xyz, instance_line3.xyz);
#else
dNormalMatrix = matrix_normal;
#endif
return normalize(dNormalMatrix * vertex_normal);
}
void main(void) {
gl_Position = getPosition();
vPositionW = getWorldPosition();
vNormalW = dNormalW = getNormal();
vec2 uv0 = getUv0();
vUv0 = uv0;
vVertexColor = vertex_color;
}
