顶点shader主要负责对顶点坐标进行变换,把顶点坐标依次从local->world->view->clip空间变换
local空间:模型物体坐标系
world空间:世界空间坐标系
view空间: 相机空间
clip空间: 裁剪空间
local->world空间简化后其实就是这么一行代码:
vec4 posW = dModelMatrix * vec4(localPos, 1.0);

此图由https://tinygltf.xyz/drawgeometry/网站脚本编写后截图输出
dModelMatrix就是将顶点local坐标系转换到了世界坐标系下。
dModelMatrix是通过getModelMatrix()方法获取
// Returns the matrix that moves vertex positions from local (model) space
// into world space. Which source it comes from depends on the compiled
// shader variant (dynamic batching / skinning / instancing / plain mesh).
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
// Dynamic batching: the per-vertex matrix fetched from the pose texture
// presumably already carries the full model transform — confirm against
// the engine's batching code.
return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
// Skinning: blend the four influencing bone matrices by the vertex
// weights, then move the skinned result into world space via matrix_model.
return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y+
getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
// Hardware instancing: rebuild the per-instance model matrix from four
// vec4 attributes streamed per instance.
return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
// Default: single mesh, use the model matrix uniform directly.
return matrix_model;
#endif
}
从代码可以看出这里有不同的分支,通过不同的方式得到modelMatrix:如果没有蒙皮动画,就简单地把外部传进来的全局matrix_model直接返回即可;
如果有蒙皮动画则会返回modelMatrix * boneMatrix,意思就是顶点local坐标先进行一次骨骼变换(通过骨骼矩阵),然后再通过modelMatrix转到世界空间。
boneMatrix的计算方式是:用各顶点权重对相应骨骼的骨骼矩阵做加权求和。
world->clip空间简化后:
screenPos = matrix_viewProjection * posW;
其实就是将之前得到的顶点世界坐标通过matrix_viewProjection矩阵变换到裁剪空间。这里虽然变量名叫screenPos,但它还不是屏幕坐标,只是尚未做透视除法的clip空间坐标;在顶点着色器之后、光栅化之前,硬件固定管线会先做裁剪,再除以w,把xyz归一到[-1,1]的NDC空间,之后才进入fragment shader。
vertex shader只需要输出clip空间的坐标即可也就是getPosition()返回给gl_Position出去。
gl_Position = getPosition();
void main(void) {
// Clip-space position for the rasterizer (also fills dModelMatrix/dPositionW).
gl_Position = getPosition();
// World-space position, world-space normal, uv0 and vertex color are
// interpolated to the fragment shader for lighting and texturing.
vPositionW = getWorldPosition();
vNormalW = dNormalW = getNormal();
vec2 uv0 = getUv0();
vUv0 = uv0;
vVertexColor = vertex_color;
}
后面几行代码getWorldPosition(),getNormal(),getUv0(),计算输出顶点的世界坐标以及世界空间法线方向和uv坐标给fragment shader用来光照着色使用!
完整代码如下:
#version 300 es
// GLSL ES 3.0 compatibility aliases: keep the GLES2-style keywords used by
// the engine's shader chunks compiling under the newer syntax.
#define attribute in
#define varying out
#define texture2D texture
#define GL2
#define VERTEXSHADER
// Outputs interpolated to the fragment shader.
varying vec4 vVertexColor;
varying vec3 vPositionW; // world-space position
varying vec3 vNormalW;   // world-space normal
varying vec2 vUv0;
// Per-vertex inputs.
attribute vec3 vertex_position;
attribute vec3 vertex_normal;
attribute vec4 vertex_tangent;
attribute vec2 vertex_texCoord0;
attribute vec2 vertex_texCoord1;
attribute vec4 vertex_color;
// Object/camera transforms supplied by the engine per draw call.
uniform mat4 matrix_viewProjection;
uniform mat4 matrix_model;
uniform mat3 matrix_normal;
// Scratch globals: written by getPosition()/getNormal(), read by the
// other helpers later in the same invocation.
vec3 dPositionW;
mat4 dModelMatrix;
mat3 dNormalMatrix;
vec3 dLightPosW;
vec3 dLightDirNormW;
vec3 dNormalW;
#ifdef NINESLICED
// 9-sliced sprite variant: derive UVs from the vertex position on the
// [-1,1] XZ plane and remap them into the sprite's atlas rectangle.
// NOTE(review): innerOffset, atlasRect and vMask are declared in another
// shader chunk not shown here — confirm against the full program.
vec2 getUv0() {
vec2 uv = vertex_position.xz;
// offset inner vertices inside
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
uv += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
uv = uv * -0.5 + 0.5;
uv = uv * atlasRect.zw + atlasRect.xy;
vMask = vertex_texCoord0.xy;
return uv;
}
#else
// Default: pass the first UV set straight through.
vec2 getUv0() {
return vertex_texCoord0;
}
#endif
// Skinning inputs: per-vertex bone weights/indices plus the pose texture,
// which stores one 4x4 matrix per bone as four consecutive RGBA texels
// (see getBoneMatrix below).
attribute vec4 vertex_boneWeights;
attribute vec4 vertex_boneIndices;
uniform sampler2D texture_poseMap;
uniform vec2 texture_poseMapSize;
// Fetches the 4x4 matrix for bone index `i` from the pose texture.
// Each bone occupies four consecutive RGBA texels on one texture row;
// each texel holds one matrix column. Sampling at texel centers (+0.5)
// avoids filtering between neighbouring matrices.
mat4 getBoneMatrix(const in float i) {
    float texelIndex = i * 4.0;
    float texWidth = float(texture_poseMapSize.x);
    float col = mod(texelIndex, texWidth);
    float row = floor(texelIndex / texWidth);
    float stepU = 1.0 / texWidth;
    float stepV = 1.0 / float(texture_poseMapSize.y);
    float v = stepV * (row + 0.5);
    vec4 c0 = texture2D(texture_poseMap, vec2(stepU * (col + 0.5), v));
    vec4 c1 = texture2D(texture_poseMap, vec2(stepU * (col + 1.5), v));
    vec4 c2 = texture2D(texture_poseMap, vec2(stepU * (col + 2.5), v));
    vec4 c3 = texture2D(texture_poseMap, vec2(stepU * (col + 3.5), v));
    return mat4(c0, c1, c2, c3);
}
// This variant forces skinning on; PIXELSNAP (optional) snaps vertex
// positions to pixel boundaries using the screen-size uniform below.
#define SKIN
#ifdef PIXELSNAP
uniform vec4 uScreenSize;
#endif
// Returns the local->world (model) matrix for the compiled shader variant.
// NOTE(review): the pasted listing had the whitespace stripped out of every
// preprocessor directive ("#ifdefDYNAMICBATCH", "#elifdefined(SKIN)", ...),
// which does not compile; restored "#ifdef X" / "#elif defined(X)".
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
// Dynamic batching: per-vertex matrix lookup carries the full transform.
return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
// Skinning: weighted blend of the four influencing bone matrices, then
// into world space via matrix_model.
return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
                       getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y +
                       getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
                       getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
// Instancing: model matrix streamed as four per-instance vec4 attributes.
return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
// Default: single mesh, use the model matrix uniform directly.
return matrix_model;
#endif
}
// Computes and returns the clip-space vertex position; also caches the
// model matrix (dModelMatrix) and world position (dPositionW) for the
// other helper functions.
// NOTE(review): the pasted listing had whitespace stripped from every
// preprocessor directive ("#ifdefNINESLICED" etc.) and dropped the final
// 1.0 component in the UV1LAYOUT vec4 constructor; both restored.
vec4 getPosition() {
dModelMatrix = getModelMatrix();
vec3 localPos = vertex_position;
#ifdef NINESLICED
// outer and inner vertices are at the same position, scale both
localPos.xz *= outerScale;
// offset inner vertices inside
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
localPos.xz += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
vTiledUv = (localPos.xz - outerScale + innerOffset.xy) * -0.5 + 1.0; // uv = local pos - inner corner
localPos.xz *= -0.5; // move from -1;1 to -0.5;0.5
localPos = localPos.xzy;
#endif
// local -> world
vec4 posW = dModelMatrix * vec4(localPos, 1.0);
#ifdef SCREENSPACE
posW.zw = vec2(0.0, 1.0);
#endif
dPositionW = posW.xyz;
vec4 screenPos;
#ifdef UV1LAYOUT
// Layout-by-UV1 mode (e.g. lightmap baking): position the vertex by its
// second UV set instead of its 3D transform.
screenPos = vec4(vertex_texCoord1.xy * 2.0 - 1.0, 0.5, 1.0);
#else
#ifdef SCREENSPACE
screenPos = posW;
#else
// world -> clip
screenPos = matrix_viewProjection * posW;
#endif
#ifdef PIXELSNAP
// snap vertex to a pixel boundary
screenPos.xy = (screenPos.xy * 0.5) + 0.5;
screenPos.xy *= uScreenSize.xy;
screenPos.xy = floor(screenPos.xy);
screenPos.xy *= uScreenSize.zw;
screenPos.xy = (screenPos.xy * 2.0) - 1.0;
#endif
#endif
return screenPos;
}
// Returns the world-space position cached by getPosition(); getPosition()
// must have run first in this invocation to populate dPositionW.
vec3 getWorldPosition() {
return dPositionW;
}
// Transforms the vertex normal into world space and normalizes it.
// NOTE(review): restored the stripped preprocessor whitespace and the lost
// matrix column indices (the paste read "dModelMatrix[].xyz" three times).
vec3 getNormal() {
#ifdef SKIN
// Use the skinned model matrix's upper-left 3x3 as the normal matrix.
// NOTE: using the model matrix directly (instead of its inverse-transpose)
// is only exact when the blended bone transform has no non-uniform scale.
dNormalMatrix = mat3(dModelMatrix[0].xyz, dModelMatrix[1].xyz, dModelMatrix[2].xyz);
#elif defined(INSTANCING)
// Per-instance normal matrix rebuilt from the instance attribute rows.
dNormalMatrix = mat3(instance_line1.xyz, instance_line2.xyz, instance_line3.xyz);
#else
// Engine-supplied normal matrix uniform.
dNormalMatrix = matrix_normal;
#endif
return normalize(dNormalMatrix * vertex_normal);
}
void main(void) {
// Clip-space position for the rasterizer (also fills dModelMatrix/dPositionW).
gl_Position = getPosition();
// World-space position, world-space normal, uv0 and vertex color are
// interpolated to the fragment shader for lighting and texturing.
vPositionW = getWorldPosition();
vNormalW = dNormalW = getNormal();
vec2 uv0 = getUv0();
vUv0 = uv0;
vVertexColor = vertex_color;
}
#version 300 es
// GLSL ES 3.0 compatibility aliases: keep the GLES2-style keywords used by
// the engine's shader chunks compiling under the newer syntax.
#define attribute in
#define varying out
#define texture2D texture
#define GL2
#define VERTEXSHADER
// Outputs interpolated to the fragment shader.
varying vec4 vVertexColor;
varying vec3 vPositionW; // world-space position
varying vec3 vNormalW;   // world-space normal
varying vec2 vUv0;
// Per-vertex inputs.
attribute vec3 vertex_position;
attribute vec3 vertex_normal;
attribute vec4 vertex_tangent;
attribute vec2 vertex_texCoord0;
attribute vec2 vertex_texCoord1;
attribute vec4 vertex_color;
// Object/camera transforms supplied by the engine per draw call.
uniform mat4 matrix_viewProjection;
uniform mat4 matrix_model;
uniform mat3 matrix_normal;
// Scratch globals: written by getPosition()/getNormal(), read by the
// other helpers later in the same invocation.
vec3 dPositionW;
mat4 dModelMatrix;
mat3 dNormalMatrix;
vec3 dLightPosW;
vec3 dLightDirNormW;
vec3 dNormalW;
#ifdef NINESLICED
// 9-sliced sprite variant: derive UVs from the vertex position on the
// [-1,1] XZ plane and remap them into the sprite's atlas rectangle.
// NOTE(review): innerOffset, atlasRect and vMask are declared in another
// shader chunk not shown here — confirm against the full program.
vec2 getUv0() {
vec2 uv = vertex_position.xz;
// offset inner vertices inside
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
uv += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
uv = uv * -0.5 + 0.5;
uv = uv * atlasRect.zw + atlasRect.xy;
vMask = vertex_texCoord0.xy;
return uv;
}
#else
// Default: pass the first UV set straight through.
vec2 getUv0() {
return vertex_texCoord0;
}
#endif
// Skinning inputs: per-vertex bone weights/indices plus the pose texture,
// which stores one 4x4 matrix per bone as four consecutive RGBA texels
// (see getBoneMatrix below).
attribute vec4 vertex_boneWeights;
attribute vec4 vertex_boneIndices;
uniform sampler2D texture_poseMap;
uniform vec2 texture_poseMapSize;
// Fetches the 4x4 matrix for bone index `i` from the pose texture.
// Each bone occupies four consecutive RGBA texels on one texture row;
// each texel holds one matrix column. Sampling at texel centers (+0.5)
// avoids filtering between neighbouring matrices.
mat4 getBoneMatrix(const in float i) {
    float texelIndex = i * 4.0;
    float texWidth = float(texture_poseMapSize.x);
    float col = mod(texelIndex, texWidth);
    float row = floor(texelIndex / texWidth);
    float stepU = 1.0 / texWidth;
    float stepV = 1.0 / float(texture_poseMapSize.y);
    float v = stepV * (row + 0.5);
    vec4 c0 = texture2D(texture_poseMap, vec2(stepU * (col + 0.5), v));
    vec4 c1 = texture2D(texture_poseMap, vec2(stepU * (col + 1.5), v));
    vec4 c2 = texture2D(texture_poseMap, vec2(stepU * (col + 2.5), v));
    vec4 c3 = texture2D(texture_poseMap, vec2(stepU * (col + 3.5), v));
    return mat4(c0, c1, c2, c3);
}
// This variant forces skinning on; PIXELSNAP (optional) snaps vertex
// positions to pixel boundaries using the screen-size uniform below.
#define SKIN
#ifdef PIXELSNAP
uniform vec4 uScreenSize;
#endif
// Returns the local->world (model) matrix for the compiled shader variant.
// NOTE(review): the pasted listing had the whitespace stripped out of every
// preprocessor directive ("#ifdefDYNAMICBATCH", "#elifdefined(SKIN)", ...),
// which does not compile; restored "#ifdef X" / "#elif defined(X)".
mat4 getModelMatrix() {
#ifdef DYNAMICBATCH
// Dynamic batching: per-vertex matrix lookup carries the full transform.
return getBoneMatrix(vertex_boneIndices);
#elif defined(SKIN)
// Skinning: weighted blend of the four influencing bone matrices, then
// into world space via matrix_model.
return matrix_model * (getBoneMatrix(vertex_boneIndices.x) * vertex_boneWeights.x +
                       getBoneMatrix(vertex_boneIndices.y) * vertex_boneWeights.y +
                       getBoneMatrix(vertex_boneIndices.z) * vertex_boneWeights.z +
                       getBoneMatrix(vertex_boneIndices.w) * vertex_boneWeights.w);
#elif defined(INSTANCING)
// Instancing: model matrix streamed as four per-instance vec4 attributes.
return mat4(instance_line1, instance_line2, instance_line3, instance_line4);
#else
// Default: single mesh, use the model matrix uniform directly.
return matrix_model;
#endif
}
// Computes and returns the clip-space vertex position; also caches the
// model matrix (dModelMatrix) and world position (dPositionW) for the
// other helper functions.
// NOTE(review): the pasted listing had whitespace stripped from every
// preprocessor directive ("#ifdefNINESLICED" etc.) and dropped the final
// 1.0 component in the UV1LAYOUT vec4 constructor; both restored.
vec4 getPosition() {
dModelMatrix = getModelMatrix();
vec3 localPos = vertex_position;
#ifdef NINESLICED
// outer and inner vertices are at the same position, scale both
localPos.xz *= outerScale;
// offset inner vertices inside
// (original vertices must be in [-1;1] range)
vec2 positiveUnitOffset = clamp(vertex_position.xz, vec2(0.0), vec2(1.0));
vec2 negativeUnitOffset = clamp(-vertex_position.xz, vec2(0.0), vec2(1.0));
localPos.xz += (-positiveUnitOffset * innerOffset.xy + negativeUnitOffset * innerOffset.zw) * vertex_texCoord0.xy;
vTiledUv = (localPos.xz - outerScale + innerOffset.xy) * -0.5 + 1.0; // uv = local pos - inner corner
localPos.xz *= -0.5; // move from -1;1 to -0.5;0.5
localPos = localPos.xzy;
#endif
// local -> world
vec4 posW = dModelMatrix * vec4(localPos, 1.0);
#ifdef SCREENSPACE
posW.zw = vec2(0.0, 1.0);
#endif
dPositionW = posW.xyz;
vec4 screenPos;
#ifdef UV1LAYOUT
// Layout-by-UV1 mode (e.g. lightmap baking): position the vertex by its
// second UV set instead of its 3D transform.
screenPos = vec4(vertex_texCoord1.xy * 2.0 - 1.0, 0.5, 1.0);
#else
#ifdef SCREENSPACE
screenPos = posW;
#else
// world -> clip
screenPos = matrix_viewProjection * posW;
#endif
#ifdef PIXELSNAP
// snap vertex to a pixel boundary
screenPos.xy = (screenPos.xy * 0.5) + 0.5;
screenPos.xy *= uScreenSize.xy;
screenPos.xy = floor(screenPos.xy);
screenPos.xy *= uScreenSize.zw;
screenPos.xy = (screenPos.xy * 2.0) - 1.0;
#endif
#endif
return screenPos;
}
// Returns the world-space position cached by getPosition(); getPosition()
// must have run first in this invocation to populate dPositionW.
vec3 getWorldPosition() {
return dPositionW;
}
// Transforms the vertex normal into world space and normalizes it.
// NOTE(review): restored the stripped preprocessor whitespace and the lost
// matrix column indices (the paste read "dModelMatrix[].xyz" three times).
vec3 getNormal() {
#ifdef SKIN
// Use the skinned model matrix's upper-left 3x3 as the normal matrix.
// NOTE: using the model matrix directly (instead of its inverse-transpose)
// is only exact when the blended bone transform has no non-uniform scale.
dNormalMatrix = mat3(dModelMatrix[0].xyz, dModelMatrix[1].xyz, dModelMatrix[2].xyz);
#elif defined(INSTANCING)
// Per-instance normal matrix rebuilt from the instance attribute rows.
dNormalMatrix = mat3(instance_line1.xyz, instance_line2.xyz, instance_line3.xyz);
#else
// Engine-supplied normal matrix uniform.
dNormalMatrix = matrix_normal;
#endif
return normalize(dNormalMatrix * vertex_normal);
}
void main(void) {
// Clip-space position for the rasterizer (also fills dModelMatrix/dPositionW).
gl_Position = getPosition();
// World-space position, world-space normal, uv0 and vertex color are
// interpolated to the fragment shader for lighting and texturing.
vPositionW = getWorldPosition();
vNormalW = dNormalW = getNormal();
vec2 uv0 = getUv0();
vUv0 = uv0;
vVertexColor = vertex_color;
}
- PlayCanvas PBR材质shader代码分析(pixel shader)
#version es #define varying in out highp vec4 pc_fragColor; #define gl_FragColor pc_fragColor #defin ...
- 【OpenGL】Shader实例分析(九)- AngryBots中的主角受伤特效
转发请保持地址:http://blog.csdn.net/stalendp/article/details/40859441 AngryBots是Unity官方的一个非常棒的样例.非常有研究价值. 曾 ...
- vertex shader(4)
Swizzling and Masking 如果你使用输入.常量.临时寄存器作为源寄存器,你可以彼此独立地swizzle .x,.y,.z,.w值.如果你使用输出.临时寄存器作为目标寄存器,你可以把. ...
- 【OpenGL】Shader实例分析(七)- 雪花飘落效果
转发请保持地址:http://blog.csdn.net/stalendp/article/details/40624603 研究了一个雪花飘落效果.感觉挺不错的.分享给大家,效果例如以下: 代码例如 ...
- GLSL写vertex shader和fragment shader
0.一般来说vertex shader处理顶点坐标,然后向后传输,经过光栅化之后,传给fragment shader,其负责颜色.纹理.光照等等. 前者处理之后变成裁剪坐标系(三维),光栅化之后一般认 ...
- Stage3d 由浅到深理解AGAL的管线vertex shader和fragment shader || 简易教程 学习心得 AGAL 非常非常好的入门文章
Everyday Stage3D (一) Everyday Stage3D (二) Triangle Everyday Stage3D (三) AGAL的基本概念 Everyday Stage3D ( ...
- vertex shader(3)
之前我们学习了如何声明顶点着色器.如何设置常量寄存器中的常量.接下来我们学习如何写和编译一个顶点着色器程序. 在我们编译一个顶点着色器之前,首先需要写一个. 有17种不同的指令(instruction ...
- 向Vertex Shader传递vertex attribute
在VBO.VAO和EBO那一节,介绍了如何向Vertex Shader传递vertex attribute的基本方法.现在我准备把这个话题再次扩展开. 传递整型数据 之前我们的顶点属性数据都是floa ...
- linearizing the depth in vertex shader
please refer to http://www.mvps.org/directx/articles/linear_z/linearz.htm When using a programmable ...
随机推荐
- 使用SuperWebSocket实现Web消息推送
在大部分Web系统中,我们可能遇到需要向客户端推送消息的需求.SuperWebSocket第三方库能让我们轻松的完成任务.SuperWebSocket第三方库可以从网上下载,不过通过Visual St ...
- 使用BFD检测EBGP邻居
在广域网BGP环境中,通常使能BFD以快速实现链路故障后的路由的主动收敛. BFD使用UDP在链路上进行双向检测,BFD有Echo mode和asynchronous mode两种模式,默认为Echo ...
- linux查看文件内容跳到文件底部和回到文件顶部的快捷键
有时候需要查看一些日志文件,然后要从底部开始查看的话 可以按 shift+g 即可跳到文件底部 要返回文件顶部的时候 按 gg即可
- 024.讲MFC_窗口指针
窗口指针通过HWND获得CWnd指针 //如何通过窗口句柄获得窗口指针获得应用程序主窗口的指针 //如何获得应用程序主窗口的指针一.建立名为dialogPoint的mfc工程,添加两个button 双 ...
- Django的安装命令
国内的一些pipy镜像源: 1.清华源: https://pypi.tuna.tsinghua.edu.cn/simple 2.豆瓣源: https://pypi.douban.com/simple ...
- Java第一次创建对象速度比之后慢的原因
类的对象在第一次创建的时候,Java虚拟机(JVM)首先检查是否所要加载的类对应的Class对象是否已经加载.如果没有加载,JVM就会根据类名查找.class文件,并将其Class对象载入.一般某个类 ...
- 使用vue-quill-editor富文本 实现图片上传
1. 下载并引入 import { quillEditor, Quill } from "vue-quill-editor"; import { container, ImageE ...
- python之子类继承父类时进行初始化的一些问题
直接看代码: class Person: def __init__(self): self.name = "jack" class Student(Person): def __i ...
- Web基础了解版11-Ajax-JSON
Ajax AJAX即“Asynchronous Javascript And XML”:是,不发生页面跳转.异步请求载入内容并改写局部页面内容的技术. 也可以简单的理解为通过JS向服务器发送请求. ...
- ng-zorro-antd中踩过的坑
ng-zorro-antd中踩过的坑 前端项目中,我们经常会使用阿里开源的组件库:ant-design,其提供的组件已经足以满足多数的需求,拿来就能直接用,十分方便,当然了,有些公司会对组件库进行二次 ...