顶点shader主要负责顶点坐标变换,将顶点坐标依次从local->world->view->clip空间变换

local空间:模型物体坐标系

world空间:世界空间坐标系

view空间: 相机空间

clip空间: 裁剪空间

local->world空间简化后其实就是这么一行代码:

vec4 posW = dModelMatrix * vec4(localPos, 1.0);

此图由https://tinygltf.xyz/drawgeometry/网站脚本编写后截图输出

dModelMatrix就是将顶点local坐标系转换到了世界坐标系下。

dModelMatrix是通过getModelMatrix()方法获取

// Selects the local->world matrix source based on compile-time defines.
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
// Dynamic batching: per-mesh transform is fetched from the pose texture.
// NOTE(review): vertex_boneIndices is declared vec4 elsewhere but
// getBoneMatrix takes a float — presumably the batched variant declares
// it as float; confirm against the DYNAMICBATCH shader variant.
return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
// Skinning: blend up to four bone matrices by vertex weight, then
// transform the skinned result into world space with matrix_model.
return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y+
getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
// Instancing: four per-instance vec4 attributes form the matrix columns.
return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
// No skinning/batching/instancing: plain uniform model matrix.
return matrix_model;
#endif
}

从代码可以看出这里有不同分支代码,通过不一样的方式得到modelMatrix:如果没有蒙皮动画,就直接返回外部传进来的全局modelMatrix即可

如果有蒙皮动画则返回modelMatrix * boneMatrix,意思就是顶点local坐标先进行一次骨骼变换(通过骨骼矩阵),然后再通过modelMatrix转到世界空间,

boneMatrix的计算方式会通过顶点权重以及对应的骨骼的骨骼矩阵进行加和。

world->clip空间简化后:

screenPos = matrix_viewProjection * posW;

其实就是将之前得到的顶点世界坐标通过matrix_viewProjection矩阵变换到裁剪空间。这里虽然写的是screenPos,其实此时坐标还不是屏幕坐标,只是未归一化的clip空间坐标;随后在顶点着色器之后、fragment shader之前的固定管线阶段,硬件会将xyz除以w并进行裁剪,得到归一到[-1,1]的NDC空间坐标

vertex shader只需要输出clip空间的坐标即可,也就是把getPosition()的返回值赋给gl_Position输出。

gl_Position = getPosition();
void main(void) {
// getPosition() must run first: it caches dModelMatrix and dPositionW,
// which getWorldPosition() and getNormal() read afterwards.
gl_Position = getPosition();
vPositionW = getWorldPosition();
// World-space normal is forwarded both as the varying and via dNormalW.
dNormalW = getNormal();
vNormalW = dNormalW;
// Pass UV0 and vertex color straight through to the fragment shader.
vUv0 = getUv0();
vVertexColor = vertex_color;
}

后面几行代码getWorldPosition(),getNormal(),getUv0(),计算输出顶点的世界坐标以及世界空间法线方向和uv坐标给fragment shader用来光照着色使用!

完整代码如下:

#version 300 es
#define attribute in
#define varying out
#define texture2D texture
#define GL2
#define VERTEXSHADER
varying vec4 vVertexColor;
varying vec3 vPositionW;
varying vec3 vNormalW;
varying vec2 vUv0;
attribute vec3 vertex_position;
attribute vec3 vertex_normal;
attribute vec4 vertex_tangent;
attribute vec2 vertex_texCoord0;
attribute vec2 vertex_texCoord1;
attribute vec4 vertex_color;
uniform mat4 matrix_viewProjection;
uniform mat4 matrix_model;
uniform mat3 matrix_normal;
vec3 dPositionW;
mat4 dModelMatrix;
mat3 dNormalMatrix;
vec3 dLightPosW;
vec3 dLightDirNormW;
vec3 dNormalW;
#ifdef NINESLICED
// 9-sliced sprite UVs: derived from the vertex's XZ position rather than
// read directly from the UV0 attribute.
// NOTE(review): innerOffset, atlasRect and vMask are not declared in this
// excerpt — presumably supplied by the NINESLICED shader variant; confirm.
vec2 getUv0() {
vec2 uv = vertex_position.xz;
// offset inner vertices inside
 
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
uv += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
// Map from [-1;1] to [0;1] (flipped), then into the sprite's atlas rectangle.
uv = uv * -0.5 + 0.5;
uv = uv * atlasRect.zw + atlasRect.xy;
vMask = vertex_texCoord0.xy;
return uv;
}
#else
// Default path: pass the UV0 vertex attribute straight through.
vec2 getUv0() {
return vertex_texCoord0;
}
#endif
attribute vec4 vertex_boneWeights;
attribute vec4 vertex_boneIndices;
uniform sampler2D texture_poseMap;
uniform vec2 texture_poseMapSize;
// Fetches bone matrix i from the pose texture. Each bone occupies 4
// consecutive texels on one row (one matrix column per RGBA texel),
// starting at texel index i*4.
mat4 getBoneMatrix(const in float i) {
float j = i * 4.0;
// Texel coordinates of the matrix's first column within the pose map.
float x = mod(j, float(texture_poseMapSize.x));
float y = floor(j / float(texture_poseMapSize.x));
// Size of one texel in normalized UV space.
float dx = 1.0 / float(texture_poseMapSize.x);
float dy = 1.0 / float(texture_poseMapSize.y);
// +0.5 samples the texel center so filtering/rounding cannot bleed
// into neighboring texels.
y = dy * (y + 0.5);
vec4 v1 = texture2D(texture_poseMap, vec2(dx * (x + 0.5), y));
vec4 v2 = texture2D(texture_poseMap, vec2(dx * (x + 1.5), y));
vec4 v3 = texture2D(texture_poseMap, vec2(dx * (x + 2.5), y));
vec4 v4 = texture2D(texture_poseMap, vec2(dx * (x + 3.5), y));
// mat4(v1..v4) uses each fetched texel as one column of the bone matrix.
mat4 bone = mat4(v1, v2, v3, v4);
return bone;
}
#define SKIN
#ifdef PIXELSNAP
uniform vec4 uScreenSize;
#endif
// Selects the local->world matrix source based on compile-time defines.
// Fixed: the directives were garbled during extraction ("#ifdefDYNAMICBATCH",
// "#elifdefined(...)") — GLSL requires a space after #ifdef / #elif.
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
    // Dynamic batching: per-mesh transform is fetched from the pose texture.
    // NOTE(review): vertex_boneIndices is declared vec4 above but
    // getBoneMatrix takes a float — presumably the batched variant declares
    // it as float; confirm against the DYNAMICBATCH shader variant.
    return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
    // Skinning: blend up to four bone matrices by vertex weight, then
    // transform the skinned result into world space with matrix_model.
    return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
                           getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y +
                           getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
                           getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
    // Instancing: four per-instance vec4 attributes form the matrix columns.
    return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
    // No skinning/batching/instancing: plain uniform model matrix.
    return matrix_model;
#endif
}
// Computes the clip-space position. Also caches dModelMatrix and the
// world-space position dPositionW for getNormal() / getWorldPosition().
// Fixed: extraction garbled "#ifdefNINESLICED"-style directives (missing
// space) and dropped the "1.0" w component in the UV1LAYOUT vec4.
vec4 getPosition() {
    dModelMatrix = getModelMatrix();
    vec3 localPos = vertex_position;
#ifdef NINESLICED
    // outer and inner vertices are at the same position, scale both
    localPos.xz *= outerScale;
    // offset inner vertices inside
    // (original vertices must be in [-1;1] range)
    vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
    vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
    localPos.xz += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
    vTiledUv = (localPos.xz - outerScale + innerOffset.xy) * -0.5 + 1.0; // uv = local pos - inner corner
    localPos.xz *= -0.5; // move from -1;1 to -0.5;0.5
    localPos = localPos.xzy;
#endif
    // local -> world.
    vec4 posW = dModelMatrix * vec4(localPos, 1.0);
#ifdef SCREENSPACE
    // Screen-space rendering: flatten depth and force w = 1.
    posW.zw = vec2(0.0, 1.0);
#endif
    dPositionW = posW.xyz;
    vec4 screenPos;
#ifdef UV1LAYOUT
    // Output positioned by UV1 instead of projecting (e.g. lightmap layout).
    screenPos = vec4(vertex_texCoord1.xy * 2.0 - 1.0, 0.5, 1.0);
#else
#ifdef SCREENSPACE
    screenPos = posW;
#else
    // world -> clip.
    screenPos = matrix_viewProjection * posW;
#endif
#ifdef PIXELSNAP
    // snap vertex to a pixel boundary
    screenPos.xy = (screenPos.xy * 0.5) + 0.5;
    screenPos.xy *= uScreenSize.xy;
    screenPos.xy = floor(screenPos.xy);
    screenPos.xy *= uScreenSize.zw;
    screenPos.xy = (screenPos.xy * 2.0) - 1.0;
#endif
#endif
    return screenPos;
}
// Returns the world-space position cached by getPosition(); only valid
// after getPosition() has been called (as main() does first).
vec3 getWorldPosition() {
return dPositionW;
}
// Transforms the vertex normal to world space.
// Fixed: extraction garbled the directives ("#ifdefSKIN", "#elifdefined")
// and dropped the matrix column indices ("dModelMatrix[].xyz").
vec3 getNormal() {
#ifdef SKIN
    // Upper-left 3x3 of the skinned model matrix; m[i] is column i.
    // NOTE(review): this is the plain 3x3, not an inverse-transpose, so it
    // presumably assumes no non-uniform scale in the skinned transform.
    dNormalMatrix = mat3(dModelMatrix[0].xyz, dModelMatrix[1].xyz, dModelMatrix[2].xyz);
#elif defined(INSTANCING)
    // Instancing: 3x3 built from the per-instance matrix columns.
    dNormalMatrix = mat3(instance_line1.xyz, instance_line2.xyz, instance_line3.xyz);
#else
    // Default: dedicated normal matrix supplied as a uniform.
    dNormalMatrix = matrix_normal;
#endif
    return normalize(dNormalMatrix * vertex_normal);
}
void main(void) {
// getPosition() must run first: it caches dModelMatrix and dPositionW,
// which getWorldPosition() and getNormal() read afterwards.
gl_Position = getPosition();
vPositionW = getWorldPosition();
// World-space normal is forwarded both as the varying and via dNormalW.
dNormalW = getNormal();
vNormalW = dNormalW;
// Pass UV0 and vertex color straight through to the fragment shader.
vUv0 = getUv0();
vVertexColor = vertex_color;
}
#version 300 es
#define attribute in
#define varying out
#define texture2D texture
#define GL2
#define VERTEXSHADER
varying vec4 vVertexColor;
varying vec3 vPositionW;
varying vec3 vNormalW;
varying vec2 vUv0;
attribute vec3 vertex_position;
attribute vec3 vertex_normal;
attribute vec4 vertex_tangent;
attribute vec2 vertex_texCoord0;
attribute vec2 vertex_texCoord1;
attribute vec4 vertex_color;
uniform mat4 matrix_viewProjection;
uniform mat4 matrix_model;
uniform mat3 matrix_normal;
vec3 dPositionW;
mat4 dModelMatrix;
mat3 dNormalMatrix;
vec3 dLightPosW;
vec3 dLightDirNormW;
vec3 dNormalW;
#ifdef NINESLICED
// 9-sliced sprite UVs: derived from the vertex's XZ position rather than
// read directly from the UV0 attribute.
// NOTE(review): innerOffset, atlasRect and vMask are not declared in this
// excerpt — presumably supplied by the NINESLICED shader variant; confirm.
vec2 getUv0() {
vec2 uv = vertex_position.xz;
// offset inner vertices inside
 
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
uv += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
// Map from [-1;1] to [0;1] (flipped), then into the sprite's atlas rectangle.
uv = uv * -0.5 + 0.5;
uv = uv * atlasRect.zw + atlasRect.xy;
vMask = vertex_texCoord0.xy;
return uv;
}
#else
// Default path: pass the UV0 vertex attribute straight through.
vec2 getUv0() {
return vertex_texCoord0;
}
#endif
attribute vec4 vertex_boneWeights;
attribute vec4 vertex_boneIndices;
uniform sampler2D texture_poseMap;
uniform vec2 texture_poseMapSize;
// Fetches bone matrix i from the pose texture. Each bone occupies 4
// consecutive texels on one row (one matrix column per RGBA texel),
// starting at texel index i*4.
mat4 getBoneMatrix(const in float i) {
float j = i * 4.0;
// Texel coordinates of the matrix's first column within the pose map.
float x = mod(j, float(texture_poseMapSize.x));
float y = floor(j / float(texture_poseMapSize.x));
// Size of one texel in normalized UV space.
float dx = 1.0 / float(texture_poseMapSize.x);
float dy = 1.0 / float(texture_poseMapSize.y);
// +0.5 samples the texel center so filtering/rounding cannot bleed
// into neighboring texels.
y = dy * (y + 0.5);
vec4 v1 = texture2D(texture_poseMap, vec2(dx * (x + 0.5), y));
vec4 v2 = texture2D(texture_poseMap, vec2(dx * (x + 1.5), y));
vec4 v3 = texture2D(texture_poseMap, vec2(dx * (x + 2.5), y));
vec4 v4 = texture2D(texture_poseMap, vec2(dx * (x + 3.5), y));
// mat4(v1..v4) uses each fetched texel as one column of the bone matrix.
mat4 bone = mat4(v1, v2, v3, v4);
return bone;
}
#define SKIN
#ifdef PIXELSNAP
uniform vec4 uScreenSize;
#endif
// Selects the local->world matrix source based on compile-time defines.
// Fixed: the directives were garbled during extraction ("#ifdefDYNAMICBATCH",
// "#elifdefined(...)") — GLSL requires a space after #ifdef / #elif.
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
    // Dynamic batching: per-mesh transform is fetched from the pose texture.
    // NOTE(review): vertex_boneIndices is declared vec4 above but
    // getBoneMatrix takes a float — presumably the batched variant declares
    // it as float; confirm against the DYNAMICBATCH shader variant.
    return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
    // Skinning: blend up to four bone matrices by vertex weight, then
    // transform the skinned result into world space with matrix_model.
    return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
                           getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y +
                           getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
                           getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
    // Instancing: four per-instance vec4 attributes form the matrix columns.
    return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
    // No skinning/batching/instancing: plain uniform model matrix.
    return matrix_model;
#endif
}
// Computes the clip-space position. Also caches dModelMatrix and the
// world-space position dPositionW for getNormal() / getWorldPosition().
// Fixed: extraction garbled "#ifdefNINESLICED"-style directives (missing
// space) and dropped the "1.0" w component in the UV1LAYOUT vec4.
vec4 getPosition() {
    dModelMatrix = getModelMatrix();
    vec3 localPos = vertex_position;
#ifdef NINESLICED
    // outer and inner vertices are at the same position, scale both
    localPos.xz *= outerScale;
    // offset inner vertices inside
    // (original vertices must be in [-1;1] range)
    vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
    vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
    localPos.xz += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
    vTiledUv = (localPos.xz - outerScale + innerOffset.xy) * -0.5 + 1.0; // uv = local pos - inner corner
    localPos.xz *= -0.5; // move from -1;1 to -0.5;0.5
    localPos = localPos.xzy;
#endif
    // local -> world.
    vec4 posW = dModelMatrix * vec4(localPos, 1.0);
#ifdef SCREENSPACE
    // Screen-space rendering: flatten depth and force w = 1.
    posW.zw = vec2(0.0, 1.0);
#endif
    dPositionW = posW.xyz;
    vec4 screenPos;
#ifdef UV1LAYOUT
    // Output positioned by UV1 instead of projecting (e.g. lightmap layout).
    screenPos = vec4(vertex_texCoord1.xy * 2.0 - 1.0, 0.5, 1.0);
#else
#ifdef SCREENSPACE
    screenPos = posW;
#else
    // world -> clip.
    screenPos = matrix_viewProjection * posW;
#endif
#ifdef PIXELSNAP
    // snap vertex to a pixel boundary
    screenPos.xy = (screenPos.xy * 0.5) + 0.5;
    screenPos.xy *= uScreenSize.xy;
    screenPos.xy = floor(screenPos.xy);
    screenPos.xy *= uScreenSize.zw;
    screenPos.xy = (screenPos.xy * 2.0) - 1.0;
#endif
#endif
    return screenPos;
}
// Returns the world-space position cached by getPosition(); only valid
// after getPosition() has been called (as main() does first).
vec3 getWorldPosition() {
return dPositionW;
}
// Transforms the vertex normal to world space.
// Fixed: extraction garbled the directives ("#ifdefSKIN", "#elifdefined")
// and dropped the matrix column indices ("dModelMatrix[].xyz").
vec3 getNormal() {
#ifdef SKIN
    // Upper-left 3x3 of the skinned model matrix; m[i] is column i.
    // NOTE(review): this is the plain 3x3, not an inverse-transpose, so it
    // presumably assumes no non-uniform scale in the skinned transform.
    dNormalMatrix = mat3(dModelMatrix[0].xyz, dModelMatrix[1].xyz, dModelMatrix[2].xyz);
#elif defined(INSTANCING)
    // Instancing: 3x3 built from the per-instance matrix columns.
    dNormalMatrix = mat3(instance_line1.xyz, instance_line2.xyz, instance_line3.xyz);
#else
    // Default: dedicated normal matrix supplied as a uniform.
    dNormalMatrix = matrix_normal;
#endif
    return normalize(dNormalMatrix * vertex_normal);
}
void main(void) {
// getPosition() must run first: it caches dModelMatrix and dPositionW,
// which getWorldPosition() and getNormal() read afterwards.
gl_Position = getPosition();
vPositionW = getWorldPosition();
// World-space normal is forwarded both as the varying and via dNormalW.
dNormalW = getNormal();
vNormalW = dNormalW;
// Pass UV0 and vertex color straight through to the fragment shader.
vUv0 = getUv0();
vVertexColor = vertex_color;
}

PlayCanvas PBR材质shader代码分析(vertex shader)的更多相关文章

  1. PlayCanvas PBR材质shader代码分析(pixel shader)

    #version es #define varying in out highp vec4 pc_fragColor; #define gl_FragColor pc_fragColor #defin ...

  2. 【OpenGL】Shader实例分析(九)- AngryBots中的主角受伤特效

    转发请保持地址:http://blog.csdn.net/stalendp/article/details/40859441 AngryBots是Unity官方的一个非常棒的样例.非常有研究价值. 曾 ...

  3. vertex shader(4)

    Swizzling and Masking 如果你使用输入.常量.临时寄存器作为源寄存器,你可以彼此独立地swizzle .x,.y,.z,.w值.如果你使用输出.临时寄存器作为目标寄存器,你可以把. ...

  4. 【OpenGL】Shader实例分析(七)- 雪花飘落效果

    转发请保持地址:http://blog.csdn.net/stalendp/article/details/40624603 研究了一个雪花飘落效果.感觉挺不错的.分享给大家,效果例如以下: 代码例如 ...

  5. GLSL写vertex shader和fragment shader

    0.一般来说vertex shader处理顶点坐标,然后向后传输,经过光栅化之后,传给fragment shader,其负责颜色.纹理.光照等等. 前者处理之后变成裁剪坐标系(三维),光栅化之后一般认 ...

  6. Stage3d 由浅到深理解AGAL的管线vertex shader和fragment shader || 简易教程 学习心得 AGAL 非常非常好的入门文章

    Everyday Stage3D (一) Everyday Stage3D (二) Triangle Everyday Stage3D (三) AGAL的基本概念 Everyday Stage3D ( ...

  7. vertex shader(3)

    之前我们学习了如何声明顶点着色器.如何设置常量寄存器中的常量.接下来我们学习如何写和编译一个顶点着色器程序. 在我们编译一个顶点着色器之前,首先需要写一个. 有17种不同的指令(instruction ...

  8. 向Vertex Shader传递vertex attribute

    在VBO.VAO和EBO那一节,介绍了如何向Vertex Shader传递vertex attribute的基本方法.现在我准备把这个话题再次扩展开. 传递整型数据 之前我们的顶点属性数据都是floa ...

  9. linearizing the depth in vertex shader

    please refer to http://www.mvps.org/directx/articles/linear_z/linearz.htm When using a programmable ...

随机推荐

  1. You are using the runtime-only build of Vue where the template compiler is not available. Either pre-compile the templates into render functions, or use the compiler-included build.

    异常 You are using the runtime-only build of Vue where the template compiler is not available. Either ...

  2. JQuery多个异步操作后执行(resolve,promise,when,done)

    代码分享: //3秒后完成 function asyncThing1() { var dfd = $.Deferred(); setTimeout(function () { alert('async ...

  3. 【Linux】一个定时任务小脚本

    需要一个定时任务,每隔固定时间停止,等待几分钟,然后在启动,每5分钟一个循环. 简要写了一下,如下,未来在优化: 3,8,13,18,23,28,33,38,43,48,53,58 * * * * s ...

  4. echarts拓扑图(graph,力导向布局图)

    echarts连接:https://gallery.echartsjs.com/editor.html?c=xCLEj67T3H 讲解:https://www.cnblogs.com/koala201 ...

  5. 理解vue数据驱动

    vue是双向数据绑定的框架,数据驱动是他的灵魂,他的实现原理众所周知是Object.defineProperty方法实现的get.set重写,但是这样说太牵强外门了.本文将宏观介绍他的实现 使用vue ...

  6. vps远程桌面服务器管理

    vps服务器是没有远程桌面系统的,本地电脑要有远程桌面的组件或者专业的远程桌面管理工具,如果出于安全考虑关闭了3389端口(这是系统自带远程桌面的端口),你可以试试iis7远程桌面管理工具,这个还是很 ...

  7. 使用EF操作Docker中的Mysql实例

    为啥我会选择mysql呢?因为我的服务器配置较低,而SqlServer在docker中的实例,服务器的运行内存应当保持在2G+,我没有这个条件,它会爆出这样的错误 sqlservr: This pro ...

  8. 1089 狼人杀-简单版 (20 分)C语言

    以下文字摘自<灵机一动·好玩的数学>:"狼人杀"游戏分为狼人.好人两大阵营.在一局"狼人杀"游戏中,1 号玩家说:"2 号是狼人" ...

  9. Python for Data Analysis 学习心得(三) - 文件读写和数据预处理

    一.Pandas文件读写 pandas很核心的一个功能就是数据读取.导入,pandas支援大部分主流的数据储存格式,并在导入的时候可以做筛选.预处理.在读取数据时的选项有超过50个参数,可见panda ...

  10. 我该如何学习spring源码以及解析bean定义的注册

    如何学习spring源码 前言 本文属于spring源码解析的系列文章之一,文章主要是介绍如何学习spring的源码,希望能够最大限度的帮助到有需要的人.文章总体难度不大,但比较繁重,学习时一定要耐住 ...