The vertex shader is mainly responsible for transforming vertex positions, taking each vertex through local -> world -> view -> clip space.

local space: the model's own object coordinate system

world space: the world coordinate system

view space: the camera coordinate system

clip space: the clip coordinate system
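
As a minimal sketch of the whole chain (matrix_view and matrix_projection are illustrative names here; PlayCanvas actually pre-multiplies them into a single matrix_viewProjection uniform):

vec4 posW = matrix_model * vec4(vertex_position, 1.0); // local -> world
vec4 posV = matrix_view * posW;                        // world -> view (camera)
gl_Position = matrix_projection * posV;                // view -> clip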

Simplified, the local -> world step is just this one line of code:

vec4 posW = dModelMatrix * vec4(localPos, 1.0);

(The accompanying screenshot was produced with a script written on https://tinygltf.xyz/drawgeometry/.)

dModelMatrix is what transforms the vertex from its local coordinate system into the world coordinate system.

dModelMatrix is obtained from the getModelMatrix() function:

mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
    return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
    return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
                           getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y +
                           getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
                           getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
    return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
    return matrix_model;
#endif
}

As the code shows, there are several preprocessor branches that obtain the model matrix in different ways. Without skinned animation, the function simply returns the global model matrix (matrix_model) passed in from outside.

With skinned animation it returns matrix_model * boneMatrix: the vertex's local position is first deformed by the bone (skinning) matrices, and the result is then moved into world space by the model matrix.

The boneMatrix itself is the weighted sum, using the vertex's bone weights, of the bone matrices of the bones that influence the vertex, as expanded in the sketch below.
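
Written out on its own, the SKIN branch above does the following (the same math as getModelMatrix(), just expanded step by step; the four weights are expected to sum to 1.0):

mat4 skinMatrix =
      getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x
    + getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y
    + getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z
    + getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w;
// skin first (local -> skinned local), then move into world space
vec4 posW = matrix_model * skinMatrix * vec4(vertex_position, 1.0);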

Simplified, the world -> clip step is:

screenPos = matrix_viewProjection * posW;

This simply takes the world-space position obtained above and transforms it into clip space with the matrix_viewProjection matrix. Although the variable is named screenPos, it is not yet a screen coordinate; it is still an un-normalized clip-space position. On the way to the fragment shader, the GPU clips against this volume and divides by w, bringing xyz into the [-1, 1] NDC range.
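
As a sketch of what the fixed-function stage then does with that clip-space value (this is not code you write in the shader; clipPos stands for the value returned by getPosition(), and viewportSize is a hypothetical name for the render target dimensions):

vec3 ndc = clipPos.xyz / clipPos.w;                   // perspective divide, xyz now in [-1, 1]
vec2 windowPos = (ndc.xy * 0.5 + 0.5) * viewportSize; // viewport transform to pixel coordinates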

The vertex shader only has to output this clip-space position; that is, the value returned by getPosition() is written to gl_Position:

gl_Position = getPosition();

In the full main() of the vertex shader this looks like:

void main(void) {
    gl_Position = getPosition();
    vPositionW = getWorldPosition();
    vNormalW = dNormalW = getNormal();
    vec2 uv0 = getUv0();
    vUv0 = uv0;
    vVertexColor = vertex_color;
}

The remaining lines, getWorldPosition(), getNormal() and getUv0(), compute and output the vertex's world-space position, world-space normal and UV coordinates, which the fragment shader later uses for lighting and shading.
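
One detail worth noting about getNormal(): matrix_normal is pre-computed on the CPU, while the SKIN branch simply reuses the upper 3x3 of dModelMatrix, which is only exact when there is no non-uniform scale. A hedged sketch of the fully general alternative (GLSL ES 3.00 provides inverse() and transpose()):

// inverse-transpose handles non-uniform scale; mat3(dModelMatrix) is cheaper when scale is uniform
mat3 normalMat = transpose(inverse(mat3(dModelMatrix)));
vec3 nW = normalize(normalMat * vertex_normal);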

The complete vertex shader code is as follows:

#version 300 es
#define attribute in
#define varying out
#define texture2D texture
#define GL2
#define VERTEXSHADER
varying vec4 vVertexColor;
varying vec3 vPositionW;
varying vec3 vNormalW;
varying vec2 vUv0;
attribute vec3 vertex_position;
attribute vec3 vertex_normal;
attribute vec4 vertex_tangent;
attribute vec2 vertex_texCoord0;
attribute vec2 vertex_texCoord1;
attribute vec4 vertex_color;
uniform mat4 matrix_viewProjection;
uniform mat4 matrix_model;
uniform mat3 matrix_normal;
vec3 dPositionW;
mat4 dModelMatrix;
mat3 dNormalMatrix;
vec3 dLightPosW;
vec3 dLightDirNormW;
vec3 dNormalW;
#ifdef NINESLICED
vec2 getUv0() {
vec2 uv = vertex_position.xz;
// offset inner vertices inside
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
uv += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
uv = uv * -0.5 + 0.5;
uv = uv * atlasRect.zw + atlasRect.xy;
vMask = vertex_texCoord0.xy;
return uv;
}
#else
vec2 getUv0() {
return vertex_texCoord0;
}
#endif
attribute vec4 vertex_boneWeights;
attribute vec4 vertex_boneIndices;
uniform sampler2D texture_poseMap;
uniform vec2 texture_poseMapSize;
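// texture_poseMap stores the skinning palette: each bone matrix occupies 4 consecutive RGBA texels
// (one texel per matrix column), so getBoneMatrix(i) maps the bone index to texel coordinates and
// rebuilds the mat4 from four texture fetches.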
mat4 getBoneMatrix(const in float i) {
float j = i * 4.0;
float x = mod(j, float(texture_poseMapSize.x));
float y = floor(j / float(texture_poseMapSize.x));
float dx = 1.0 / float(texture_poseMapSize.x);
float dy = 1.0 / float(texture_poseMapSize.y);
y = dy * (y + 0.5);
vec4 v1 = texture2D(texture_poseMap, vec2(dx * (x + 0.5), y));
vec4 v2 = texture2D(texture_poseMap, vec2(dx * (x + 1.5), y));
vec4 v3 = texture2D(texture_poseMap, vec2(dx * (x + 2.5), y));
vec4 v4 = texture2D(texture_poseMap, vec2(dx * (x + 3.5), y));
mat4 bone = mat4(v1, v2, v3, v4);
return bone;
}
#define SKIN
#ifdef PIXELSNAP
uniform vec4 uScreenSize;
#endif
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y +
getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
return matrix_model;
#endif
}
vec4 getPosition() {
dModelMatrix = getModelMatrix();
vec3 localPos = vertex_position;
#ifdef NINESLICED
// outer and inner vertices are at the same position, scale both
localPos.xz *= outerScale;
// offset inner vertices inside
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
localPos.xz += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
vTiledUv = (localPos.xz - outerScale + innerOffset.xy) * -0.5 + 1.0; // uv = local pos - inner corner
localPos.xz *= -0.5; // move from -1;1 to -0.5;0.5
localPos = localPos.xzy;
#endif
vec4 posW = dModelMatrix * vec4(localPos, 1.0);
#ifdef SCREENSPACE
posW.zw = vec2(0.0, 1.0);
#endif
dPositionW = posW.xyz;
vec4 screenPos;
#ifdef UV1LAYOUT
screenPos = vec4(vertex_texCoord1.xy * 2.0 - 1.0, 0.5, 1.0);
#else
#ifdef SCREENSPACE
screenPos = posW;
#else
screenPos = matrix_viewProjection * posW;
#endif
#ifdef PIXELSNAP
// snap vertex to a pixel boundary
screenPos.xy = (screenPos.xy * 0.5) + 0.5;
screenPos.xy *= uScreenSize.xy;
screenPos.xy = floor(screenPos.xy);
screenPos.xy *= uScreenSize.zw;
screenPos.xy = (screenPos.xy * 2.0) - 1.0;
#endif
#endif
return screenPos;
}
vec3 getWorldPosition() {
return dPositionW;
}
vec3 getNormal() {
#ifdef SKIN
dNormalMatrix = mat3(dModelMatrix[0].xyz, dModelMatrix[1].xyz, dModelMatrix[2].xyz);
#elif defined(INSTANCING)
dNormalMatrix = mat3(instance_line1.xyz, instance_line2.xyz, instance_line3.xyz);
#else
dNormalMatrix = matrix_normal;
#endif
return normalize(dNormalMatrix * vertex_normal);
}
void main(void) {
gl_Position = getPosition();
vPositionW = getWorldPosition();
vNormalW = dNormalW = getNormal();
vec2 uv0 = getUv0();
vUv0 = uv0;
vVertexColor = vertex_color;
}