forked from StrafesNET/strafe-project

Compare commits: redo-input ... load-textu (10 Commits)
Commits (SHA1):

- e2f649baae
- 140e84341d
- 5c568dec84
- f457acef2f
- f9a725b767
- 01153fc929
- acb658f3e9
- 7e427b3879
- d16485ae6d
- cdf695ee6e
images/squid.dds (BIN, new file): binary file not shown

src/main.rs (474)
@@ -19,13 +19,13 @@ struct Entity {

//temp?
struct ModelData {
-	transform: glam::Mat4,
+	transforms: Vec<glam::Mat4>,
	vertex_buf: wgpu::Buffer,
	entities: Vec<Entity>,
}

struct ModelGraphics {
-	transform: glam::Mat4,
+	transforms: Vec<glam::Mat4>,
	vertex_buf: wgpu::Buffer,
	entities: Vec<Entity>,
	bind_group: wgpu::BindGroup,
@@ -113,21 +113,29 @@ impl Camera {
	}
}

-pub struct Skybox {
+pub struct GraphicsBindGroups {
+	camera: wgpu::BindGroup,
+	skybox_texture: wgpu::BindGroup,
+}

+pub struct GraphicsPipelines {
+	skybox: wgpu::RenderPipeline,
+	model: wgpu::RenderPipeline,
+}

+pub struct GraphicsData {
	start_time: std::time::Instant,
	camera: Camera,
	physics: strafe_client::body::PhysicsState,
-	sky_pipeline: wgpu::RenderPipeline,
-	entity_pipeline: wgpu::RenderPipeline,
-	ground_pipeline: wgpu::RenderPipeline,
-	main_bind_group: wgpu::BindGroup,
+	pipelines: GraphicsPipelines,
+	bind_groups: GraphicsBindGroups,
	camera_buf: wgpu::Buffer,
	models: Vec<ModelGraphics>,
	depth_view: wgpu::TextureView,
	staging_belt: wgpu::util::StagingBelt,
}

-impl Skybox {
+impl GraphicsData {
	const DEPTH_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Depth24Plus;

	fn create_depth_texture(
@@ -153,14 +161,17 @@ impl Skybox {
	}
}

-fn get_transform_uniform_data(transform:&glam::Mat4) -> [f32; 4*4] {
-	let mut raw = [0f32; 4*4];
-	raw[0..16].copy_from_slice(&AsRef::<[f32; 4*4]>::as_ref(transform)[..]);
+fn get_transform_uniform_data(transforms:&Vec<glam::Mat4>) -> Vec<f32> {
+	let mut raw = Vec::with_capacity(4*4*transforms.len());
+	for (i,t) in transforms.iter().enumerate(){
+		let mut v = raw.split_off(4*4*i);
+		raw.extend_from_slice(&AsRef::<[f32; 4*4]>::as_ref(t)[..]);
+		raw.append(&mut v);
+	}
	raw
}

-fn add_obj(device:&wgpu::Device,modeldatas:& mut Vec<ModelData>,source:&[u8]){
-	let data = obj::ObjData::load_buf(&source[..]).unwrap();
+fn add_obj(device:&wgpu::Device,modeldatas:& mut Vec<ModelData>,data:obj::ObjData){
	let mut vertices = Vec::new();
	let mut vertex_index = std::collections::HashMap::<obj::IndexTuple,u16>::new();
	for object in data.objects {
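A note on the new `get_transform_uniform_data` above: the `split_off(4*4*i)` point is always the current end of the vector, so the split/extend/append sequence amounts to appending each matrix's 16 floats in order. A minimal standalone sketch of that equivalent flattening, assuming only the `glam` crate (the helper name `flatten_transforms` is illustrative and not code from this repository):

```rust
// Standalone sketch: flatten a slice of 4x4 matrices into contiguous f32s,
// in the same column-major order the diff uploads to the uniform buffer.
fn flatten_transforms(transforms: &[glam::Mat4]) -> Vec<f32> {
    let mut raw = Vec::with_capacity(16 * transforms.len());
    for t in transforms {
        // Mat4::to_cols_array yields the 16 floats in column-major order.
        raw.extend_from_slice(&t.to_cols_array());
    }
    raw
}

fn main() {
    let transforms = vec![
        glam::Mat4::IDENTITY,
        glam::Mat4::from_translation(glam::vec3(10.0, 5.0, 10.0)),
    ];
    assert_eq!(flatten_transforms(&transforms).len(), 32);
}
```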
@@ -202,14 +213,14 @@ fn add_obj(device:&wgpu::Device,modeldatas:& mut Vec<ModelData>,source:&[u8]){
			usage: wgpu::BufferUsages::VERTEX,
		});
		modeldatas.push(ModelData {
-			transform: glam::Mat4::default(),
+			transforms: vec![glam::Mat4::default()],
			vertex_buf,
			entities,
		})
	}
}

-impl strafe_client::framework::Example for Skybox {
+impl strafe_client::framework::Example for GraphicsData {
	fn optional_features() -> wgpu::Features {
		wgpu::Features::TEXTURE_COMPRESSION_ASTC
			| wgpu::Features::TEXTURE_COMPRESSION_ETC2
@@ -223,15 +234,77 @@ impl strafe_client::framework::Example for Skybox {
		queue: &wgpu::Queue,
	) -> Self {
		let mut modeldatas = Vec::<ModelData>::new();
-		add_obj(device,& mut modeldatas,include_bytes!("../models/teslacyberv3.0.obj"));
-		add_obj(device,& mut modeldatas,include_bytes!("../models/suzanne.obj"));
-		add_obj(device,& mut modeldatas,include_bytes!("../models/teapot.obj"));
+		let ground=obj::ObjData{
+			position: vec![[-1.0,0.0,-1.0],[1.0,0.0,-1.0],[1.0,0.0,1.0],[-1.0,0.0,1.0]],
+			texture: vec![[-10.0,-10.0],[10.0,-10.0],[10.0,10.0],[-10.0,10.0]],
+			normal: vec![[0.0,1.0,0.0]],
+			objects: vec![obj::Object{
+				name: "Ground Object".to_owned(),
+				groups: vec![obj::Group{
+					name: "Ground Group".to_owned(),
+					index: 0,
+					material: None,
+					polys: vec![obj::SimplePolygon(vec![
+						obj::IndexTuple(0,Some(0),Some(0)),
+						obj::IndexTuple(1,Some(1),Some(0)),
+						obj::IndexTuple(2,Some(2),Some(0)),
+						obj::IndexTuple(3,Some(3),Some(0)),
+					])]
+				}]
+			}],
+			material_libs: Vec::new(),
+		};
+		add_obj(device,& mut modeldatas,obj::ObjData::load_buf(&include_bytes!("../models/teslacyberv3.0.obj")[..]).unwrap());
+		add_obj(device,& mut modeldatas,obj::ObjData::load_buf(&include_bytes!("../models/suzanne.obj")[..]).unwrap());
+		add_obj(device,& mut modeldatas,obj::ObjData::load_buf(&include_bytes!("../models/teapot.obj")[..]).unwrap());
+		add_obj(device,& mut modeldatas,ground);
		println!("models.len = {:?}", modeldatas.len());
-		modeldatas[1].transform=glam::Mat4::from_translation(glam::vec3(10.,5.,10.));
-		modeldatas[2].transform=glam::Mat4::from_translation(glam::vec3(-10.,5.,10.));
+		modeldatas[0].transforms[0]=glam::Mat4::from_translation(glam::vec3(10.,0.,-10.));
+		modeldatas[1].transforms[0]=glam::Mat4::from_translation(glam::vec3(10.,5.,10.));
+		modeldatas[1].transforms.push(glam::Mat4::from_translation(glam::vec3(20.,5.,10.)));
+		modeldatas[1].transforms.push(glam::Mat4::from_translation(glam::vec3(10.,5.,20.)));
+		modeldatas[1].transforms.push(glam::Mat4::from_translation(glam::vec3(20.,5.,20.)));
+		modeldatas[2].transforms[0]=glam::Mat4::from_translation(glam::vec3(-10.,5.,10.));
+		modeldatas[3].transforms[0]=glam::Mat4::from_translation(glam::vec3(0.,0.,0.))*glam::Mat4::from_scale(glam::vec3(160.0, 1.0, 160.0));

-		let main_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
+		let camera_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
			label: None,
			entries: &[
				wgpu::BindGroupLayoutEntry {
					binding: 0,
+					visibility: wgpu::ShaderStages::VERTEX,
+					ty: wgpu::BindingType::Buffer {
+						ty: wgpu::BufferBindingType::Uniform,
+						has_dynamic_offset: false,
+						min_binding_size: None,
+					},
+					count: None,
+				},
+			],
+		});
+		let skybox_texture_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
+			label: Some("Skybox Texture Bind Group Layout"),
+			entries: &[
+				wgpu::BindGroupLayoutEntry {
+					binding: 0,
+					visibility: wgpu::ShaderStages::FRAGMENT,
+					ty: wgpu::BindingType::Texture {
+						sample_type: wgpu::TextureSampleType::Float { filterable: true },
+						multisampled: false,
+						view_dimension: wgpu::TextureViewDimension::Cube,
+					},
+					count: None,
+				},
+				wgpu::BindGroupLayoutEntry {
+					binding: 1,
+					visibility: wgpu::ShaderStages::FRAGMENT,
+					ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
+					count: None,
+				},
+			],
+		});
+		let model_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
+			label: Some("Model Bind Group Layout"),
+			entries: &[
+				wgpu::BindGroupLayoutEntry {
+					binding: 0,
@@ -249,7 +322,7 @@ impl strafe_client::framework::Example for Skybox {
					ty: wgpu::BindingType::Texture {
						sample_type: wgpu::TextureSampleType::Float { filterable: true },
						multisampled: false,
-						view_dimension: wgpu::TextureViewDimension::Cube,
+						view_dimension: wgpu::TextureViewDimension::D2,
					},
					count: None,
				},
@@ -261,20 +334,26 @@ impl strafe_client::framework::Example for Skybox {
				},
			],
		});
-		let model_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
-			label: None,
-			entries: &[
-				wgpu::BindGroupLayoutEntry {
-					binding: 0,
-					visibility: wgpu::ShaderStages::VERTEX,
-					ty: wgpu::BindingType::Buffer {
-						ty: wgpu::BufferBindingType::Uniform,
-						has_dynamic_offset: false,
-						min_binding_size: None,
-					},
-					count: None,
-				},
-			],

+		let clamp_sampler = device.create_sampler(&wgpu::SamplerDescriptor {
+			label: Some("Clamp Sampler"),
+			address_mode_u: wgpu::AddressMode::ClampToEdge,
+			address_mode_v: wgpu::AddressMode::ClampToEdge,
+			address_mode_w: wgpu::AddressMode::ClampToEdge,
+			mag_filter: wgpu::FilterMode::Linear,
+			min_filter: wgpu::FilterMode::Linear,
+			mipmap_filter: wgpu::FilterMode::Linear,
+			..Default::default()
+		});
+		let repeat_sampler = device.create_sampler(&wgpu::SamplerDescriptor {
+			label: Some("Repeat Sampler"),
+			address_mode_u: wgpu::AddressMode::Repeat,
+			address_mode_v: wgpu::AddressMode::Repeat,
+			address_mode_w: wgpu::AddressMode::Repeat,
+			mag_filter: wgpu::FilterMode::Linear,
+			min_filter: wgpu::FilterMode::Linear,
+			mipmap_filter: wgpu::FilterMode::Linear,
+			..Default::default()
		});

		// Create the render pipeline
@@ -306,22 +385,131 @@ impl strafe_client::framework::Example for Skybox {
			temp_control_dir: glam::Vec3::ZERO,
			walkspeed: 18.0,
			contacts: std::collections::HashSet::new(),
-			models_cringe_clone: modeldatas.iter().map(|m|strafe_client::body::Model::new(m.transform)).collect(),
+			models_cringe_clone: modeldatas.iter().map(|m|m.transforms.iter().map(|t|strafe_client::body::Model::new(*t))).flatten().collect(),
			walk: strafe_client::body::WalkState::new(),
			hitbox_halfsize: glam::vec3(1.0,2.5,1.0),
		};

-		let camera_uniforms = camera.to_uniform_data(physics.body.extrapolated_position(0));
-		let camera_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
-			label: Some("Camera"),
-			contents: bytemuck::cast_slice(&camera_uniforms),
-			usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
-		});
+		//load textures
+		let device_features = device.features();

+		let skybox_texture_view={
+			let skybox_format = if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ASTC) {
+				log::info!("Using ASTC");
+				wgpu::TextureFormat::Astc {
+					block: AstcBlock::B4x4,
+					channel: AstcChannel::UnormSrgb,
+				}
+			} else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ETC2) {
+				log::info!("Using ETC2");
+				wgpu::TextureFormat::Etc2Rgb8UnormSrgb
+			} else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_BC) {
+				log::info!("Using BC");
+				wgpu::TextureFormat::Bc1RgbaUnormSrgb
+			} else {
+				log::info!("Using plain");
+				wgpu::TextureFormat::Bgra8UnormSrgb
+			};

+			let size = wgpu::Extent3d {
+				width: IMAGE_SIZE,
+				height: IMAGE_SIZE,
+				depth_or_array_layers: 6,
+			};

+			let layer_size = wgpu::Extent3d {
+				depth_or_array_layers: 1,
+				..size
+			};
+			let max_mips = layer_size.max_mips(wgpu::TextureDimension::D2);

+			log::debug!(
+				"Copying {:?} skybox images of size {}, {}, 6 with {} mips to gpu",
+				skybox_format,
+				IMAGE_SIZE,
+				IMAGE_SIZE,
+				max_mips,
+			);

+			let bytes = match skybox_format {
+				wgpu::TextureFormat::Astc {
+					block: AstcBlock::B4x4,
+					channel: AstcChannel::UnormSrgb,
+				} => &include_bytes!("../images/astc.dds")[..],
+				wgpu::TextureFormat::Etc2Rgb8UnormSrgb => &include_bytes!("../images/etc2.dds")[..],
+				wgpu::TextureFormat::Bc1RgbaUnormSrgb => &include_bytes!("../images/bc1.dds")[..],
+				wgpu::TextureFormat::Bgra8UnormSrgb => &include_bytes!("../images/bgra.dds")[..],
+				_ => unreachable!(),
+			};

+			let skybox_image = ddsfile::Dds::read(&mut std::io::Cursor::new(&bytes)).unwrap();

+			let skybox_texture = device.create_texture_with_data(
+				queue,
+				&wgpu::TextureDescriptor {
+					size,
+					mip_level_count: max_mips,
+					sample_count: 1,
+					dimension: wgpu::TextureDimension::D2,
+					format: skybox_format,
+					usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
+					label: Some("Skybox Texture"),
+					view_formats: &[],
+				},
+				&skybox_image.data,
+			);

+			skybox_texture.create_view(&wgpu::TextureViewDescriptor {
+				label: Some("Skybox Texture View"),
+				dimension: Some(wgpu::TextureViewDimension::Cube),
+				..wgpu::TextureViewDescriptor::default()
+			})
+		};

+		//squid
+		let squid_texture_view={
+			let size = wgpu::Extent3d {
+				width: 1076,
+				height: 1076,
+				depth_or_array_layers: 1,
+			};

+			let layer_size = wgpu::Extent3d {
+				depth_or_array_layers: 1,
+				..size
+			};
+			let max_mips = layer_size.max_mips(wgpu::TextureDimension::D2);

+			let bytes = &include_bytes!("../images/squid.dds")[..];

+			let image = ddsfile::Dds::read(&mut std::io::Cursor::new(&bytes)).unwrap();

+			let texture = device.create_texture_with_data(
+				queue,
+				&wgpu::TextureDescriptor {
+					size,
+					mip_level_count: max_mips,
+					sample_count: 1,
+					dimension: wgpu::TextureDimension::D2,
+					format: wgpu::TextureFormat::Bc7RgbaUnorm,
+					usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
+					label: Some("Squid Texture"),
+					view_formats: &[],
+				},
+				&image.data,
+			);

+			texture.create_view(&wgpu::TextureViewDescriptor {
+				label: Some("Squid Texture View"),
+				dimension: Some(wgpu::TextureViewDimension::D2),
+				..wgpu::TextureViewDescriptor::default()
+			})
+		};

		//drain the modeldata vec so entities can be /moved/ to models.entities
		let mut models = Vec::<ModelGraphics>::with_capacity(modeldatas.len());
		for (i,modeldata) in modeldatas.drain(..).enumerate() {
-			let model_uniforms = get_transform_uniform_data(&modeldata.transform);
+			let model_uniforms = get_transform_uniform_data(&modeldata.transforms);
			let model_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
				label: Some(format!("ModelGraphics{}",i).as_str()),
				contents: bytemuck::cast_slice(&model_uniforms),
@@ -334,12 +522,20 @@ impl strafe_client::framework::Example for Skybox {
					binding: 0,
					resource: model_buf.as_entire_binding(),
				},
+				wgpu::BindGroupEntry {
+					binding: 1,
+					resource: wgpu::BindingResource::TextureView(&squid_texture_view),
+				},
+				wgpu::BindGroupEntry {
+					binding: 2,
+					resource: wgpu::BindingResource::Sampler(&repeat_sampler),
+				},
			],
			label: Some(format!("ModelGraphics{}",i).as_str()),
		});
		//all of these are being moved here
		models.push(ModelGraphics{
-			transform: modeldata.transform,
+			transforms: modeldata.transforms,
			vertex_buf:modeldata.vertex_buf,
			entities: modeldata.entities,
			bind_group: model_bind_group,
@@ -349,13 +545,17 @@ impl strafe_client::framework::Example for Skybox {

		let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
			label: None,
-			bind_group_layouts: &[&main_bind_group_layout, &model_bind_group_layout],
+			bind_group_layouts: &[
+				&camera_bind_group_layout,
+				&model_bind_group_layout,
+				&skybox_texture_bind_group_layout,
+			],
			push_constant_ranges: &[],
		});

		// Create the render pipelines
		let sky_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
-			label: Some("Sky"),
+			label: Some("Sky Pipeline"),
			layout: Some(&pipeline_layout),
			vertex: wgpu::VertexState {
				module: &shader,
@@ -381,12 +581,12 @@ impl strafe_client::framework::Example for Skybox {
			multisample: wgpu::MultisampleState::default(),
			multiview: None,
		});
-		let entity_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
-			label: Some("Entity"),
+		let model_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
+			label: Some("Model Pipeline"),
			layout: Some(&pipeline_layout),
			vertex: wgpu::VertexState {
				module: &shader,
-				entry_point: "vs_entity",
+				entry_point: "vs_entity_texture",
				buffers: &[wgpu::VertexBufferLayout {
					array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
					step_mode: wgpu::VertexStepMode::Vertex,
@@ -395,34 +595,7 @@ impl strafe_client::framework::Example for Skybox {
			},
			fragment: Some(wgpu::FragmentState {
				module: &shader,
-				entry_point: "fs_entity",
-				targets: &[Some(config.view_formats[0].into())],
-			}),
-			primitive: wgpu::PrimitiveState {
-				front_face: wgpu::FrontFace::Cw,
-				..Default::default()
-			},
-			depth_stencil: Some(wgpu::DepthStencilState {
-				format: Self::DEPTH_FORMAT,
-				depth_write_enabled: true,
-				depth_compare: wgpu::CompareFunction::LessEqual,
-				stencil: wgpu::StencilState::default(),
-				bias: wgpu::DepthBiasState::default(),
-			}),
-			multisample: wgpu::MultisampleState::default(),
-			multiview: None,
-		});
-		let ground_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
-			label: Some("Ground"),
-			layout: Some(&pipeline_layout),
-			vertex: wgpu::VertexState {
-				module: &shader,
-				entry_point: "vs_ground",
-				buffers: &[],
-			},
-			fragment: Some(wgpu::FragmentState {
-				module: &shader,
-				entry_point: "fs_ground",
+				entry_point: "fs_entity_texture",
				targets: &[Some(config.view_formats[0].into())],
			}),
			primitive: wgpu::PrimitiveState {
@@ -440,118 +613,51 @@ impl strafe_client::framework::Example for Skybox {
			multiview: None,
		});

-		let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
-			label: None,
-			address_mode_u: wgpu::AddressMode::ClampToEdge,
-			address_mode_v: wgpu::AddressMode::ClampToEdge,
-			address_mode_w: wgpu::AddressMode::ClampToEdge,
-			mag_filter: wgpu::FilterMode::Linear,
-			min_filter: wgpu::FilterMode::Linear,
-			mipmap_filter: wgpu::FilterMode::Linear,
-			..Default::default()
+		let camera_uniforms = camera.to_uniform_data(physics.body.extrapolated_position(0));
+		let camera_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
+			label: Some("Camera"),
+			contents: bytemuck::cast_slice(&camera_uniforms),
+			usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
		});

-		let device_features = device.features();

-		let skybox_format = if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ASTC) {
-			log::info!("Using ASTC");
-			wgpu::TextureFormat::Astc {
-				block: AstcBlock::B4x4,
-				channel: AstcChannel::UnormSrgb,
-			}
-		} else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ETC2) {
-			log::info!("Using ETC2");
-			wgpu::TextureFormat::Etc2Rgb8UnormSrgb
-		} else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_BC) {
-			log::info!("Using BC");
-			wgpu::TextureFormat::Bc1RgbaUnormSrgb
-		} else {
-			log::info!("Using plain");
-			wgpu::TextureFormat::Bgra8UnormSrgb
-		};

-		let size = wgpu::Extent3d {
-			width: IMAGE_SIZE,
-			height: IMAGE_SIZE,
-			depth_or_array_layers: 6,
-		};

-		let layer_size = wgpu::Extent3d {
-			depth_or_array_layers: 1,
-			..size
-		};
-		let max_mips = layer_size.max_mips(wgpu::TextureDimension::D2);

-		log::debug!(
-			"Copying {:?} skybox images of size {}, {}, 6 with {} mips to gpu",
-			skybox_format,
-			IMAGE_SIZE,
-			IMAGE_SIZE,
-			max_mips,
-		);

-		let bytes = match skybox_format {
-			wgpu::TextureFormat::Astc {
-				block: AstcBlock::B4x4,
-				channel: AstcChannel::UnormSrgb,
-			} => &include_bytes!("../images/astc.dds")[..],
-			wgpu::TextureFormat::Etc2Rgb8UnormSrgb => &include_bytes!("../images/etc2.dds")[..],
-			wgpu::TextureFormat::Bc1RgbaUnormSrgb => &include_bytes!("../images/bc1.dds")[..],
-			wgpu::TextureFormat::Bgra8UnormSrgb => &include_bytes!("../images/bgra.dds")[..],
-			_ => unreachable!(),
-		};

-		let image = ddsfile::Dds::read(&mut std::io::Cursor::new(&bytes)).unwrap();

-		let texture = device.create_texture_with_data(
-			queue,
-			&wgpu::TextureDescriptor {
-				size,
-				mip_level_count: max_mips,
-				sample_count: 1,
-				dimension: wgpu::TextureDimension::D2,
-				format: skybox_format,
-				usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
-				label: None,
-				view_formats: &[],
-			},
-			&image.data,
-		);

-		let texture_view = texture.create_view(&wgpu::TextureViewDescriptor {
-			label: None,
-			dimension: Some(wgpu::TextureViewDimension::Cube),
-			..wgpu::TextureViewDescriptor::default()
-		});
-		let main_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
-			layout: &main_bind_group_layout,
+		let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
+			layout: &camera_bind_group_layout,
			entries: &[
				wgpu::BindGroupEntry {
					binding: 0,
					resource: camera_buf.as_entire_binding(),
				},
-				wgpu::BindGroupEntry {
-					binding: 1,
-					resource: wgpu::BindingResource::TextureView(&texture_view),
-				},
-				wgpu::BindGroupEntry {
-					binding: 2,
-					resource: wgpu::BindingResource::Sampler(&sampler),
-				},
			],
			label: Some("Camera"),
		});
+		let skybox_texture_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
+			layout: &skybox_texture_bind_group_layout,
+			entries: &[
+				wgpu::BindGroupEntry {
+					binding: 0,
+					resource: wgpu::BindingResource::TextureView(&skybox_texture_view),
+				},
+				wgpu::BindGroupEntry {
+					binding: 1,
+					resource: wgpu::BindingResource::Sampler(&clamp_sampler),
+				},
+			],
+			label: Some("Sky Texture"),
+		});

		let depth_view = Self::create_depth_texture(config, device);

-		Skybox {
+		GraphicsData {
			start_time: Instant::now(),
			camera,
			physics,
-			sky_pipeline,
-			entity_pipeline,
-			ground_pipeline,
-			main_bind_group,
+			pipelines:GraphicsPipelines{
+				skybox:sky_pipeline,
+				model:model_pipeline
+			},
+			bind_groups:GraphicsBindGroups{
+				camera:camera_bind_group,
+				skybox_texture:skybox_texture_bind_group,
+			},
			camera_buf,
			models,
			depth_view,
@@ -683,7 +789,7 @@ impl strafe_client::framework::Example for Skybox {
				.copy_from_slice(bytemuck::cast_slice(&camera_uniforms));
			//This code only needs to run when the uniforms change
			for model in self.models.iter() {
-				let model_uniforms = get_transform_uniform_data(&model.transform);
+				let model_uniforms = get_transform_uniform_data(&model.transforms);
				self.staging_belt
					.write_buffer(
						&mut encoder,
@@ -722,25 +828,21 @@ impl strafe_client::framework::Example for Skybox {
				}),
			});

-			rpass.set_bind_group(0, &self.main_bind_group, &[]);
+			rpass.set_bind_group(0, &self.bind_groups.camera, &[]);
+			rpass.set_bind_group(2, &self.bind_groups.skybox_texture, &[]);

-			rpass.set_pipeline(&self.entity_pipeline);
+			rpass.set_pipeline(&self.pipelines.model);
			for model in self.models.iter() {
				rpass.set_bind_group(1, &model.bind_group, &[]);
				rpass.set_vertex_buffer(0, model.vertex_buf.slice(..));

				for entity in model.entities.iter() {
					rpass.set_index_buffer(entity.index_buf.slice(..), wgpu::IndexFormat::Uint16);
-					rpass.draw_indexed(0..entity.index_count, 0, 0..1);
+					rpass.draw_indexed(0..entity.index_count, 0, 0..model.transforms.len() as u32);
				}
			}

-			rpass.set_pipeline(&self.ground_pipeline);
-			//rpass.set_index_buffer(&[0u16,1,2,1,2,3][..] as wgpu::BufferSlice, wgpu::IndexFormat::Uint16);
-			//rpass.draw_indexed(0..4, 0, 0..1);
-			rpass.draw(0..6, 0..1);

-			rpass.set_pipeline(&self.sky_pipeline);
+			rpass.set_pipeline(&self.pipelines.skybox);
			rpass.draw(0..3, 0..1);
		}
@@ -751,7 +853,7 @@ impl strafe_client::framework::Example for Skybox {
}

fn main() {
-	strafe_client::framework::run::<Skybox>(
+	strafe_client::framework::run::<GraphicsData>(
		format!("Strafe Client v{}",
			env!("CARGO_PKG_VERSION")
		).as_str()
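Taken together, the `src/main.rs` changes give each `ModelData` a `Vec<glam::Mat4>`, flatten that list into one uniform buffer per model, and issue one instanced `draw_indexed` per entity with `0..transforms.len()` instances. A compact sketch of that pattern follows; it is an illustration under assumptions rather than the repository's code (the helper names are invented; the wgpu, glam and bytemuck crates are assumed):

```rust
// Sketch: upload all of a model's transforms as one uniform buffer and draw
// every instance in a single call. The shader indexes the array with
// @builtin(instance_index).
use wgpu::util::DeviceExt;

fn upload_transforms(device: &wgpu::Device, transforms: &[glam::Mat4]) -> wgpu::Buffer {
    // Flatten the matrices into contiguous column-major f32s.
    let raw: Vec<f32> = transforms.iter().flat_map(|t| t.to_cols_array()).collect();
    device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("Model Transforms"),
        contents: bytemuck::cast_slice(&raw),
        usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
    })
}

fn draw_model<'a>(
    rpass: &mut wgpu::RenderPass<'a>,
    bind_group: &'a wgpu::BindGroup,
    vertex_buf: &'a wgpu::Buffer,
    index_buf: &'a wgpu::Buffer,
    index_count: u32,
    instance_count: u32, // number of transforms uploaded for this model
) {
    rpass.set_bind_group(1, bind_group, &[]);
    rpass.set_vertex_buffer(0, vertex_buf.slice(..));
    rpass.set_index_buffer(index_buf.slice(..), wgpu::IndexFormat::Uint16);
    // One draw call covers every instance of this entity.
    rpass.draw_indexed(0..index_count, 0, 0..instance_count);
}
```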
src/shader.wgsl (120)
@@ -1,9 +1,4 @@
-struct SkyOutput {
-	@builtin(position) position: vec4<f32>,
-	@location(0) sampledir: vec3<f32>,
-};

-struct Data {
+struct Camera {
	// from camera to screen
	proj: mat4x4<f32>,
	// from screen to camera
@@ -13,9 +8,16 @@ struct Data {
	// camera position
	cam_pos: vec4<f32>,
};

+//group 0 is the camera
@group(0)
@binding(0)
-var<uniform> r_data: Data;
+var<uniform> camera: Camera;

+struct SkyOutput {
+	@builtin(position) position: vec4<f32>,
+	@location(0) sampledir: vec3<f32>,
+};

@vertex
fn vs_sky(@builtin(vertex_index) vertex_index: u32) -> SkyOutput {
|
||||
);
|
||||
|
||||
// transposition = inversion for this orthonormal matrix
|
||||
let inv_model_view = transpose(mat3x3<f32>(r_data.view[0].xyz, r_data.view[1].xyz, r_data.view[2].xyz));
|
||||
let unprojected = r_data.proj_inv * pos;
|
||||
let inv_model_view = transpose(mat3x3<f32>(camera.view[0].xyz, camera.view[1].xyz, camera.view[2].xyz));
|
||||
let unprojected = camera.proj_inv * pos;
|
||||
|
||||
var result: SkyOutput;
|
||||
result.sampledir = inv_model_view * unprojected.xyz;
|
||||
@@ -39,93 +41,65 @@ fn vs_sky(@builtin(vertex_index) vertex_index: u32) -> SkyOutput {
	return result;
}

-struct GroundOutput {
-	@builtin(position) position: vec4<f32>,
-	@location(4) pos: vec3<f32>,
-};
+const MAX_ENTITY_INSTANCES=1024;
+//group 1 is the model
+@group(1)
+@binding(0)
+var<uniform> entity_transforms: array<mat4x4<f32>,MAX_ENTITY_INSTANCES>;
+//var<uniform> entity_texture_transforms: array<mat3x3<f32>,MAX_ENTITY_INSTANCES>;
+//my fancy idea is to create a megatexture for each model that includes all the textures each intance will need
+//the texture transform then maps the texture coordinates to the location of the specific texture
+//how to do no texture?
+@group(1)
+@binding(1)
+var model_texture: texture_2d<f32>;
+@group(1)
+@binding(2)
+var model_sampler: sampler;

-@vertex
-fn vs_ground(@builtin(vertex_index) vertex_index: u32) -> GroundOutput {
-	// hacky way to draw two triangles that make a square
-	let tmp1 = i32(vertex_index)/2-i32(vertex_index)/3;
-	let tmp2 = i32(vertex_index)&1;
-	let pos = vec3<f32>(
-		f32(tmp1) * 2.0 - 1.0,
-		0.0,
-		f32(tmp2) * 2.0 - 1.0
-	) * 160.0;

-	var result: GroundOutput;
-	result.pos = pos;
-	result.position = r_data.proj * r_data.view * vec4<f32>(pos, 1.0);
-	return result;
-}

-struct EntityOutput {
+struct EntityOutputTexture {
	@builtin(position) position: vec4<f32>,
	@location(1) texture: vec2<f32>,
	@location(2) normal: vec3<f32>,
	@location(3) view: vec3<f32>,
};

-@group(1)
-@binding(0)
-var<uniform> r_EntityTransform: mat4x4<f32>;

@vertex
-fn vs_entity(
+fn vs_entity_texture(
+	@builtin(instance_index) instance: u32,
	@location(0) pos: vec3<f32>,
	@location(1) texture: vec2<f32>,
	@location(2) normal: vec3<f32>,
-) -> EntityOutput {
-	var position: vec4<f32> = r_EntityTransform * vec4<f32>(pos, 1.0);
-	var result: EntityOutput;
-	result.normal = (r_EntityTransform * vec4<f32>(normal, 0.0)).xyz;
-	result.texture=texture;
-	result.view = position.xyz - r_data.cam_pos.xyz;
-	result.position = r_data.proj * r_data.view * position;
+) -> EntityOutputTexture {
+	var position: vec4<f32> = entity_transforms[instance] * vec4<f32>(pos, 1.0);
+	var result: EntityOutputTexture;
+	result.normal = (entity_transforms[instance] * vec4<f32>(normal, 0.0)).xyz;
+	result.texture=texture;//(entity_texture_transforms[instance] * vec3<f32>(texture, 1.0)).xy;
+	result.view = position.xyz - camera.cam_pos.xyz;
+	result.position = camera.proj * camera.view * position;
	return result;
}

-@group(0)
+//group 2 is the skybox texture
+@group(2)
+@binding(0)
+var cube_texture: texture_cube<f32>;
+@group(2)
@binding(1)
-var r_texture: texture_cube<f32>;
-@group(0)
-@binding(2)
-var r_sampler: sampler;
+var cube_sampler: sampler;

@fragment
fn fs_sky(vertex: SkyOutput) -> @location(0) vec4<f32> {
-	return textureSample(r_texture, r_sampler, vertex.sampledir);
+	return textureSample(cube_texture, model_sampler, vertex.sampledir);
}

@fragment
-fn fs_entity(vertex: EntityOutput) -> @location(0) vec4<f32> {
+fn fs_entity_texture(vertex: EntityOutputTexture) -> @location(0) vec4<f32> {
	let incident = normalize(vertex.view);
	let normal = normalize(vertex.normal);
	let d = dot(normal, incident);
	let reflected = incident - 2.0 * d * normal;

-	let dir = vec3<f32>(-1.0)+2.0*vec3<f32>(vertex.texture.x,0.0,vertex.texture.y);
-	let texture_color = textureSample(r_texture, r_sampler, dir).rgb;
-	let reflected_color = textureSample(r_texture, r_sampler, reflected).rgb;
-	return vec4<f32>(mix(vec3<f32>(0.1) + 0.5 * reflected_color,texture_color,1.0-pow(1.0-abs(d),2.0)), 1.0);
-}

-fn modulo_euclidean (a: f32, b: f32) -> f32 {
-	var m = a % b;
-	if (m < 0.0) {
-		if (b < 0.0) {
-			m -= b;
-		} else {
-			m += b;
-		}
-	}
-	return m;
-}

-@fragment
-fn fs_ground(vertex: GroundOutput) -> @location(0) vec4<f32> {
-	let dir = vec3<f32>(-1.0)+vec3<f32>(modulo_euclidean(vertex.pos.x/16.,1.0),0.0,modulo_euclidean(vertex.pos.z/16.,1.0))*2.0;
-	return vec4<f32>(textureSample(r_texture, r_sampler, dir).rgb, 1.0);
+	let fragment_color = textureSample(model_texture, model_sampler, vertex.texture).rgb;
+	let reflected_color = textureSample(cube_texture, cube_sampler, reflected).rgb;
+	return vec4<f32>(mix(vec3<f32>(0.1) + 0.5 * reflected_color,fragment_color,1.0-pow(1.0-abs(d),2.0)), 1.0);
}
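The shader's `@group(N)` indices have to line up with the order of `bind_group_layouts` on the Rust side, which is exactly what the reordered pipeline layout and the `set_bind_group(0/1/2, ...)` calls in `src/main.rs` establish: group 0 is the camera, group 1 the model, group 2 the skybox texture. A minimal sketch of that correspondence, with placeholder parameter names standing in for the layouts created earlier:

```rust
// Sketch: the position of each layout in bind_group_layouts fixes the
// @group(N) index the WGSL above refers to.
fn make_pipeline_layout(
    device: &wgpu::Device,
    camera_layout: &wgpu::BindGroupLayout,         // -> @group(0): var<uniform> camera
    model_layout: &wgpu::BindGroupLayout,          // -> @group(1): entity_transforms, model_texture, model_sampler
    skybox_texture_layout: &wgpu::BindGroupLayout, // -> @group(2): cube_texture, cube_sampler
) -> wgpu::PipelineLayout {
    device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
        label: None,
        bind_group_layouts: &[camera_layout, model_layout, skybox_texture_layout],
        push_constant_ranges: &[],
    })
}
```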