semicongine: comparison of tests/test_gltf.nim @ 1258:5442d0e9d8ff
did: improve testing lighting, try new glb model (need to add jpeg support first)
| author | sam <sam@basx.dev> |
|---|---|
| date | Sun, 28 Jul 2024 20:42:51 +0700 |
| parents | e9b8d87b9883 |
| children | a13509ede62a |
| 1257:e9b8d87b9883 (parent) | 1258:5442d0e9d8ff (this changeset) |
|---|---|
| 13 type | 13 type |
| 14 ObjectData = object | 14 ObjectData = object |
| 15 transform: Mat4 | 15 transform: Mat4 |
| 16 materialId: int32 | 16 materialId: int32 |
| 17 Camera = object | 17 Camera = object |
| 18 viewPerspective: Mat4 | 18 view: Mat4 |
| | 19 normal: Mat4 |
| | 20 projection: Mat4 |
| 19 Material = object | 21 Material = object |
| 20 color: Vec4f = NewVec4f(1, 1, 1, 1) | 22 color: Vec4f = NewVec4f(1, 1, 1, 1) |
| 21 # colorTexture: int32 = -1 | 23 # colorTexture: int32 = -1 |
| 22 metallic: float32 = 0 | 24 metallic: float32 = 0 |
| 23 roughness: float32 = 0 | 25 roughness: float32 = 0 |
| 32 Shader = object | 34 Shader = object |
| 33 objectData {.PushConstantAttribute.}: ObjectData | 35 objectData {.PushConstantAttribute.}: ObjectData |
| 34 position {.VertexAttribute.}: Vec3f | 36 position {.VertexAttribute.}: Vec3f |
| 35 color {.VertexAttribute.}: Vec4f | 37 color {.VertexAttribute.}: Vec4f |
| 36 normal {.VertexAttribute.}: Vec3f | 38 normal {.VertexAttribute.}: Vec3f |
| | 39 fragmentPosition {.Pass.}: Vec3f |
| 37 fragmentColor {.Pass.}: Vec4f | 40 fragmentColor {.Pass.}: Vec4f |
| 38 fragmentNormal {.Pass.}: Vec3f | 41 fragmentNormal {.Pass.}: Vec3f |
| 39 outColor {.ShaderOutput.}: Vec4f | 42 outColor {.ShaderOutput.}: Vec4f |
| 40 descriptors {.DescriptorSets.}: (MainDescriptors, ) | 43 descriptors {.DescriptorSets.}: (MainDescriptors, ) |
| 41 # code | 44 # code |
| 42 vertexCode: string = """ | 45 vertexCode: string = """ |
| 43 void main() { | 46 void main() { |
| | 47 mat4 modelView = objectData.transform * camera.view; |
| | 48 mat3 normalMat = mat3(transpose(inverse(objectData.transform))); |
| | 49 vec4 posTransformed = vec4(position, 1) * modelView; |
| | 50 fragmentPosition = posTransformed.xyz / posTransformed.w; |
| 44 fragmentColor = color * materials[objectData.materialId].color; | 51 fragmentColor = color * materials[objectData.materialId].color; |
| 45 fragmentNormal = normal; | 52 fragmentNormal = normal * normalMat; |
| 46 gl_Position = vec4(position, 1) * (objectData.transform * camera.viewPerspective); | 53 gl_Position = vec4(position, 1) * (modelView * camera.projection); |
| 47 }""" | 54 }""" |
| 48 fragmentCode: string = """ | 55 fragmentCode: string = """ |
| 49 const vec3 lightDir = normalize(vec3(1, -1, 1)); | 56 const vec3 lightPosition = vec3(7, 9, -12); |
| | 57 const float shininess = 40; |
| | 58 const vec3 ambientColor = vec3(0, 0, 0); |
| | 59 const vec3 lightColor = vec3(1, 1, 1); |
| | 60 // const vec3 specColor = vec3(1, 1, 1); |
| | 61 const float lightPower = 20; |
| 50 void main() { | 62 void main() { |
| 51 outColor = vec4(fragmentColor.rgb * (1 - abs(dot(fragmentNormal, lightDir))), fragmentColor.a); | 63 // some setup |
| | 64 vec3 normal = normalize(fragmentNormal); |
| | 65 vec3 lightDir = lightPosition - fragmentPosition; |
| | 66 float dist = length(lightDir); |
| | 67 lightDir = normalize(lightDir); |
| | 68 |
| | 69 float lambertian = max(dot(lightDir, normal), 0); |
| | 70 float specular = 0; |
| | 71 |
| | 72 // blinn-phong |
| | 73 if (lambertian > 0) { |
| | 74 vec3 viewDir = normalize(-fragmentPosition); |
| | 75 vec3 halfDir = normalize(lightDir + viewDir); |
| | 76 float specAngle = max(dot(halfDir, normal), 0.0); |
| | 77 specular = pow(specAngle, shininess); |
| | 78 } |
| | 79 |
| | 80 vec3 diffuseColor = fragmentColor.rgb; |
| | 81 vec3 specColor = diffuseColor; |
| | 82 vec3 color = ambientColor + diffuseColor * lambertian * lightColor * lightPower / dist + specColor * specular * lightColor * lightPower / dist; |
| | 83 |
| | 84 outColor = vec4(color, fragmentColor.a); |
| 52 }""" | 85 }""" |
| 53 Mesh = object | 86 Mesh = object |
| 54 position: GPUArray[Vec3f, VertexBuffer] | 87 position: GPUArray[Vec3f, VertexBuffer] |
| 55 color: GPUArray[Vec4f, VertexBuffer] | 88 color: GPUArray[Vec4f, VertexBuffer] |
| 56 normal: GPUArray[Vec3f, VertexBuffer] | 89 normal: GPUArray[Vec3f, VertexBuffer] |
| 57 indices: GPUArray[uint32, IndexBuffer] | 90 indices: GPUArray[uint32, IndexBuffer] |
| 58 material: int32 | 91 material: int32 |
| 59 | 92 |
| 60 var gltfData = LoadMeshes[Mesh, Material]( | 93 var gltfData = LoadMeshes[Mesh, Material]( |
| 61 "town.glb", | 94 # "town.glb", |
| 95 "forest.glb", | |
| 62 MeshAttributeNames( | 96 MeshAttributeNames( |
| 63 POSITION: "position", | 97 POSITION: "position", |
| 64 COLOR: @["color"], | 98 COLOR: @["color"], |
| 65 NORMAL: "normal", | 99 NORMAL: "normal", |
| 66 indices: "indices", | 100 indices: "indices", |
| 79 ) | 113 ) |
| 80 ) | 114 ) |
| 81 var descriptors = asDescriptorSet( | 115 var descriptors = asDescriptorSet( |
| 82 MainDescriptors( | 116 MainDescriptors( |
| 83 camera: asGPUValue(Camera( | 117 camera: asGPUValue(Camera( |
| 84 viewPerspective: Unit4, | 118 view: Unit4, |
| | 119 normal: Unit4, |
| | 120 projection: Unit4, |
| 85 ), UniformBufferMapped) | 121 ), UniformBufferMapped) |
| 86 ) | 122 ) |
| 87 ) | 123 ) |
| 88 for i in 0 ..< gltfData.materials.len: | 124 for i in 0 ..< gltfData.materials.len: |
| 89 descriptors.data.materials[i] = asGPUValue(gltfData.materials[i], UniformBuffer) | 125 descriptors.data.materials[i] = asGPUValue(gltfData.materials[i], UniformBuffer) |
| 136 let camDir = (Rotate(camYaw, Y) * Rotate(camPitch, X)) * Z | 172 let camDir = (Rotate(camYaw, Y) * Rotate(camPitch, X)) * Z |
| 137 let camDirSide = camDir.Cross(-Y).Normalized | 173 let camDirSide = camDir.Cross(-Y).Normalized |
| 138 camPos += camDir * forward * dt | 174 camPos += camDir * forward * dt |
| 139 camPos += camDirSide * sideward * dt | 175 camPos += camDirSide * sideward * dt |
| 140 | 176 |
| 141 let fovH = PI / 2 | 177 let view = Rotate(-camPitch, X) * Rotate(-camYaw, Y) * Translate(-camPos) |
| 142 let fovV = 2 * arctan(tan(fovH / 2) * 1 / GetAspectRatio()) | 178 descriptors.data.camera.data.view = view |
| 143 descriptors.data.camera.data.viewPerspective = ( | 179 descriptors.data.camera.data.normal = view |
| 144 Perspective(fovV, aspect = GetAspectRatio(), zNear = 0.01, zFar = 20) * | 180 descriptors.data.camera.data.projection = Perspective(PI / 2, aspect = GetAspectRatio(), zNear = 0.01, zFar = 20) |
| 145 Rotate(-camPitch, X) * Rotate(-camYaw, Y) * Translate(-camPos) | |
| 146 ) | |
| 147 | 181 |
| 148 UpdateGPUBuffer(descriptors.data.camera) | 182 UpdateGPUBuffer(descriptors.data.camera) |
| 149 | 183 |
| 150 WithNextFrame(framebuffer, commandbuffer): | 184 WithNextFrame(framebuffer, commandbuffer): |
| 151 | 185 |
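A note on the normal handling introduced above: the new vertex shader builds `normalMat` as `mat3(transpose(inverse(objectData.transform)))` and transforms the normal with it instead of passing it through unchanged. The inverse-transpose is what keeps normals perpendicular to the surface when the model transform contains non-uniform scale; here is a short math restatement of that line (with generic symbols `M` for the model transform, `t` for a tangent, `n` for a normal, none of which are names from the code):

```latex
% n' = (M^{-1})^T n stays orthogonal to the transformed tangent M t:
n'^{\top}\,(M t) \;=\; \big((M^{-1})^{\top} n\big)^{\top} M t \;=\; n^{\top} M^{-1} M t \;=\; n^{\top} t \;=\; 0
```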
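The new fragment shader replaces the previous single directional term `1 - abs(dot(fragmentNormal, lightDir))` with a point light at `lightPosition`, shaded per fragment with the Blinn-Phong model. Written out as math, this is a restatement of the GLSL above (with `P = lightPower`, `d` the distance to the light, and `specColor = diffuseColor` as set in the shader):

```latex
\hat{L} = \frac{p_{\mathrm{light}} - p_{\mathrm{frag}}}{d}, \qquad
d = \lVert p_{\mathrm{light}} - p_{\mathrm{frag}} \rVert, \qquad
\hat{V} = \frac{-p_{\mathrm{frag}}}{\lVert p_{\mathrm{frag}} \rVert}, \qquad
\hat{H} = \frac{\hat{L} + \hat{V}}{\lVert \hat{L} + \hat{V} \rVert}

\lambda = \max(\hat{N} \cdot \hat{L},\, 0), \qquad
s = \begin{cases} \max(\hat{N} \cdot \hat{H},\, 0)^{\mathrm{shininess}} & \lambda > 0 \\ 0 & \text{otherwise} \end{cases}

c_{\mathrm{out}} = c_{\mathrm{ambient}} + \big(c_{\mathrm{diffuse}}\,\lambda + c_{\mathrm{spec}}\,s\big)\,\frac{c_{\mathrm{light}}\,P}{d}
```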
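On the CPU side, the single `viewPerspective` uniform is split into `view`, `normal`, and `projection`, and the view matrix is now built as `Rotate(-camPitch, X) * Rotate(-camYaw, Y) * Translate(-camPos)`. That expression is the inverse of the camera's world transform implied by the `camDir` computation (translate to `camPos`, then yaw around Y, then pitch around X), via the usual identity for inverting a product:

```latex
\big(T(p)\, R_y(\mathrm{yaw})\, R_x(\mathrm{pitch})\big)^{-1}
  \;=\; R_x(-\mathrm{pitch})\, R_y(-\mathrm{yaw})\, T(-p)
```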
