// strafe-client-jed/src/shader.wgsl
// Camera uniforms shared by every pipeline in this file.
struct Camera {
    // from camera space to clip space
    proj: mat4x4<f32>,
    // from clip space back to camera space
    proj_inv: mat4x4<f32>,
    // from world space to camera space
    view: mat4x4<f32>,
    // camera position in world space
    cam_pos: vec4<f32>,
};
//group 0 is the camera
@group(0)
@binding(0)
var<uniform> camera: Camera;
// Vertex -> fragment interface for the skybox pass.
struct SkyOutput {
    @builtin(position) position: vec4<f32>,
    // direction used to sample the skybox cube map in fs_sky
    @location(0) sampledir: vec3<f32>,
};
2023-08-30 01:20:58 +00:00
@vertex
fn vs_sky(@builtin(vertex_index) vertex_index: u32) -> SkyOutput {
2023-09-06 21:39:44 +00:00
// hacky way to draw a large triangle
let tmp1 = i32(vertex_index) / 2;
let tmp2 = i32(vertex_index) & 1;
let pos = vec4<f32>(
f32(tmp1) * 4.0 - 1.0,
f32(tmp2) * 4.0 - 1.0,
1.0,
1.0
);
// transposition = inversion for this orthonormal matrix
let inv_model_view = transpose(mat3x3<f32>(camera.view[0].xyz, camera.view[1].xyz, camera.view[2].xyz));
let unprojected = camera.proj_inv * pos;
2023-09-06 21:39:44 +00:00
var result: SkyOutput;
result.sampledir = inv_model_view * unprojected.xyz;
result.position = pos;
return result;
2023-08-30 01:20:58 +00:00
}
2023-09-21 20:02:01 +00:00
struct ModelInstance{
model_transform:mat4x4<f32>,
2023-09-21 20:02:01 +00:00
color:vec4<f32>,
}
//my fancy idea is to create a megatexture for each model that includes all the textures each instance will need
//the texture transform then maps the texture coordinates to the location of the specific texture
//group 2 is the model (the bindings below use @group(2))
const MAX_MODEL_INSTANCES=4096;
@group(2)
@binding(0)
var<uniform> model_instances: array<ModelInstance, MAX_MODEL_INSTANCES>;
@group(2)
@binding(1)
var model_texture: texture_2d<f32>;
@group(2)
@binding(2)
var model_sampler: sampler;
// Vertex -> fragment interface for the textured-entity pipeline.
struct EntityOutputTexture {
    @builtin(position) position: vec4<f32>,
    // texture coordinates
    @location(1) texture: vec2<f32>,
    // world-space normal (normalized in the fragment stage)
    @location(2) normal: vec3<f32>,
    // world-space vector from the camera to the vertex
    @location(3) view: vec3<f32>,
    // instance color * vertex color
    @location(4) color: vec4<f32>,
};
// Vertex shader for textured, instanced entities.
@vertex
fn vs_entity_texture(
    @builtin(instance_index) instance: u32,
    @location(0) pos: vec3<f32>,
    @location(1) texture: vec2<f32>,
    @location(2) normal: vec3<f32>,
    @location(3) color: vec4<f32>,
) -> EntityOutputTexture {
    // model space -> world space
    var position: vec4<f32> = model_instances[instance].model_transform * vec4<f32>(pos, 1.0);
    var result: EntityOutputTexture;
    // NOTE(review): normals are transformed by the model matrix directly (w=0 drops
    // translation); this is only correct for rotation/uniform-scale transforms —
    // non-uniform scale would need the inverse-transpose. Confirm instance transforms.
    result.normal = (model_instances[instance].model_transform * vec4<f32>(normal, 0.0)).xyz;
    result.texture = texture;
    result.color = model_instances[instance].color * color;
    // world-space vector from camera to vertex, consumed by fs_entity_texture
    result.view = position.xyz - camera.cam_pos.xyz;
    result.position = camera.proj * camera.view * position;
    return result;
}
//group 1 is the skybox texture (the bindings below use @group(1))
@group(1)
@binding(0)
var cube_texture: texture_cube<f32>;
@group(1)
@binding(1)
var cube_sampler: sampler;
2023-08-30 01:20:58 +00:00
@fragment
fn fs_sky(vertex: SkyOutput) -> @location(0) vec4<f32> {
2023-09-29 01:28:10 +00:00
return textureSample(cube_texture, cube_sampler, vertex.sampledir);
2023-08-30 01:20:58 +00:00
}
// Fragment shader for textured entities: blends the textured/tinted surface
// color with a cheap cube-map reflection, weighted by a Fresnel-like falloff.
@fragment
fn fs_entity_texture(vertex: EntityOutputTexture) -> @location(0) vec4<f32> {
    let incident = normalize(vertex.view);
    let normal = normalize(vertex.normal);
    let d = dot(normal, incident);
    // mirror the incident direction about the surface normal
    let reflected = incident - 2.0 * d * normal;
    let fragment_color = textureSample(model_texture, model_sampler, vertex.texture)*vertex.color;
    let reflected_color = textureSample(cube_texture, cube_sampler, reflected).rgb;
    // grazing angles (|d| -> 0) favor the ambient+reflection term;
    // head-on views (|d| -> 1) favor the surface color
    return mix(vec4<f32>(vec3<f32>(0.05) + 0.2 * reflected_color,1.0),fragment_color,1.0-pow(1.0-abs(d),2.0));
}