Compare commits

...

9 Commits

Author SHA1 Message Date
69c7896011 binding group per model 2023-09-05 14:58:55 -07:00
1a1509f315 try one entity, convert to mat4x4 2023-09-05 14:58:55 -07:00
87f674238d add model_bind_group 2023-09-05 14:58:55 -07:00
cce9800045 no fragment 2023-09-05 14:58:55 -07:00
8792b8e782 add entity CFrames 2023-09-05 14:58:55 -07:00
c9cb22be68 comment on value 2023-09-05 14:58:55 -07:00
d4172f5040 rename uniform_buf to camera_buf 2023-09-05 14:58:55 -07:00
df43dc6f6e add suzanne 2023-09-05 14:58:55 -07:00
c3290e2ce6 macro version into window title 2023-09-05 12:23:26 -07:00
3 changed files with 2735 additions and 63 deletions

2580
models/suzanne.obj Normal file

File diff suppressed because it is too large Load Diff

View File

@ -16,6 +16,19 @@ struct Entity {
vertex_buf: wgpu::Buffer, vertex_buf: wgpu::Buffer,
} }
//temp? — NOTE(review): author marked this temporary; ModelData is the CPU-side
// staging form of a model, built by add_obj() before any GPU resources exist.
struct ModelData {
// local-to-world transform applied to every entity in this model
transform: glam::Affine3A,
// one Entity per OBJ group, each owning its own vertex buffer
entities: Vec<Entity>,
}
// GPU-ready model: the same transform/entities as ModelData, plus the uniform
// buffer holding the transform (as a mat4x4) and the bind group (group 1)
// that exposes that buffer to the vertex shader.
struct Model {
transform: glam::Affine3A,
entities: Vec<Entity>,
bind_group: wgpu::BindGroup,
model_buf: wgpu::Buffer,
}
// Note: we use the Y=up coordinate space in this example. // Note: we use the Y=up coordinate space in this example.
struct Camera { struct Camera {
time: Instant, time: Instant,
@ -100,7 +113,7 @@ impl Camera {
raw[16..32].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&proj_inv)[..]); raw[16..32].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&proj_inv)[..]);
raw[32..48].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&view)[..]); raw[32..48].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&view)[..]);
raw[48..51].copy_from_slice(AsRef::<[f32; 3]>::as_ref(&self.pos)); raw[48..51].copy_from_slice(AsRef::<[f32; 3]>::as_ref(&self.pos));
raw[51] = 1.0; raw[51] = 1.0;//cam_pos is vec4
raw raw
} }
} }
@ -110,9 +123,9 @@ pub struct Skybox {
sky_pipeline: wgpu::RenderPipeline, sky_pipeline: wgpu::RenderPipeline,
entity_pipeline: wgpu::RenderPipeline, entity_pipeline: wgpu::RenderPipeline,
ground_pipeline: wgpu::RenderPipeline, ground_pipeline: wgpu::RenderPipeline,
bind_group: wgpu::BindGroup, main_bind_group: wgpu::BindGroup,
uniform_buf: wgpu::Buffer, camera_buf: wgpu::Buffer,
entities: Vec<Entity>, models: Vec<Model>,
depth_view: wgpu::TextureView, depth_view: wgpu::TextureView,
staging_belt: wgpu::util::StagingBelt, staging_belt: wgpu::util::StagingBelt,
} }
@ -143,6 +156,48 @@ impl Skybox {
} }
} }
// Flattens a model transform into the 16 f32s (column-major) of the
// `mat4x4<f32>` uniform the entity shader reads at group 1, binding 0.
//
// `Mat4::to_cols_array` already yields the `[f32; 16]` column-major layout,
// so the previous zero-init + `copy_from_slice` + `AsRef` round-trip is
// unnecessary.
fn get_transform_uniform_data(transform:&glam::Affine3A) -> [f32; 4*4] {
    glam::Mat4::from(*transform).to_cols_array()
}
// Parses an OBJ byte stream and appends one ModelData (with an identity
// transform) per OBJ object. Each group inside an object becomes an Entity
// with its own VERTEX buffer; polygons are fan-triangulated so the buffers
// contain plain triangle lists.
fn add_obj(device:&wgpu::Device,modeldatas:& mut Vec<ModelData>,source:&[u8]){
    let data = obj::ObjData::load_buf(&source[..]).unwrap();
    // scratch buffer reused across groups to avoid reallocating per group
    let mut scratch = Vec::new();
    for object in data.objects {
        let mut entities = Vec::<Entity>::new();
        for group in object.groups {
            scratch.clear();
            for poly in group.polys {
                // fan-triangulation: emit triangle (0, i-1, i) for every
                // polygon corner past the second
                for tri_end in 2..poly.0.len() {
                    for &corner in [0, tri_end - 1, tri_end].iter() {
                        let obj::IndexTuple(position_id, _texture_id, normal_id) =
                            poly.0[corner];
                        scratch.push(Vertex {
                            pos: data.position[position_id],
                            normal: data.normal[normal_id.unwrap()],
                        });
                    }
                }
            }
            let vertex_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
                label: Some("Vertex"),
                contents: bytemuck::cast_slice(&scratch),
                usage: wgpu::BufferUsages::VERTEX,
            });
            entities.push(Entity {
                vertex_count: scratch.len() as u32,
                vertex_buf,
            });
        }
        modeldatas.push(ModelData {
            transform: glam::Affine3A::default(),
            entities,
        });
    }
}
impl strafe_client::framework::Example for Skybox { impl strafe_client::framework::Example for Skybox {
fn optional_features() -> wgpu::Features { fn optional_features() -> wgpu::Features {
wgpu::Features::TEXTURE_COMPRESSION_ASTC wgpu::Features::TEXTURE_COMPRESSION_ASTC
@ -156,45 +211,18 @@ impl strafe_client::framework::Example for Skybox {
device: &wgpu::Device, device: &wgpu::Device,
queue: &wgpu::Queue, queue: &wgpu::Queue,
) -> Self { ) -> Self {
let mut entities = Vec::new(); let mut modeldatas = Vec::<ModelData>::new();
{ add_obj(device,& mut modeldatas,include_bytes!("../models/teslacyberv3.0.obj"));
let source = include_bytes!("../models/teslacyberv3.0.obj"); add_obj(device,& mut modeldatas,include_bytes!("../models/suzanne.obj"));
let data = obj::ObjData::load_buf(&source[..]).unwrap(); println!("models.len = {:?}", modeldatas.len());
let mut vertices = Vec::new(); modeldatas[1].transform=glam::Affine3A::from_translation(glam::vec3(10.,5.,10.));
for object in data.objects {
for group in object.groups {
vertices.clear();
for poly in group.polys {
for end_index in 2..poly.0.len() {
for &index in &[0, end_index - 1, end_index] {
let obj::IndexTuple(position_id, _texture_id, normal_id) =
poly.0[index];
vertices.push(Vertex {
pos: data.position[position_id],
normal: data.normal[normal_id.unwrap()],
})
}
}
}
let vertex_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Vertex"),
contents: bytemuck::cast_slice(&vertices),
usage: wgpu::BufferUsages::VERTEX,
});
entities.push(Entity {
vertex_count: vertices.len() as u32,
vertex_buf,
});
}
}
}
let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor { let main_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: None, label: None,
entries: &[ entries: &[
wgpu::BindGroupLayoutEntry { wgpu::BindGroupLayoutEntry {
binding: 0, binding: 0,
visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT, visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer { ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform, ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false, has_dynamic_offset: false,
@ -220,6 +248,21 @@ impl strafe_client::framework::Example for Skybox {
}, },
], ],
}); });
// Layout for the per-model bind group (group 1): a single uniform buffer
// holding the model transform, visible to the vertex stage only.
let model_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: None,
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
},
],
});
// Create the render pipeline // Create the render pipeline
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor { let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
@ -243,16 +286,16 @@ impl strafe_client::framework::Example for Skybox {
grounded: true, grounded: true,
walkspeed: 18.0, walkspeed: 18.0,
}; };
let raw_uniforms = camera.to_uniform_data(); let camera_uniforms = camera.to_uniform_data();
let uniform_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { let camera_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Buffer"), label: Some("Camera"),
contents: bytemuck::cast_slice(&raw_uniforms), contents: bytemuck::cast_slice(&camera_uniforms),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
}); });
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: None, label: None,
bind_group_layouts: &[&bind_group_layout], bind_group_layouts: &[&main_bind_group_layout, &model_bind_group_layout],
push_constant_ranges: &[], push_constant_ranges: &[],
}); });
@ -426,12 +469,12 @@ impl strafe_client::framework::Example for Skybox {
dimension: Some(wgpu::TextureViewDimension::Cube), dimension: Some(wgpu::TextureViewDimension::Cube),
..wgpu::TextureViewDescriptor::default() ..wgpu::TextureViewDescriptor::default()
}); });
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { let main_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &bind_group_layout, layout: &main_bind_group_layout,
entries: &[ entries: &[
wgpu::BindGroupEntry { wgpu::BindGroupEntry {
binding: 0, binding: 0,
resource: uniform_buf.as_entire_binding(), resource: camera_buf.as_entire_binding(),
}, },
wgpu::BindGroupEntry { wgpu::BindGroupEntry {
binding: 1, binding: 1,
@ -442,8 +485,33 @@ impl strafe_client::framework::Example for Skybox {
resource: wgpu::BindingResource::Sampler(&sampler), resource: wgpu::BindingResource::Sampler(&sampler),
}, },
], ],
label: None, label: Some("Camera"),
}); });
// Promote each CPU-side ModelData into a GPU-ready Model: upload its
// transform into a uniform buffer and wrap that buffer in a bind group
// matching model_bind_group_layout. drain(..) moves the data out so the
// entities' vertex buffers are not cloned.
let mut models = Vec::<Model>::new();
for (i,modeldata) in modeldatas.drain(..).enumerate() {
let model_uniforms = get_transform_uniform_data(&modeldata.transform);
// COPY_DST so render() can stream updated transforms via the staging belt
let model_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some(format!("Model{}",i).as_str()),
contents: bytemuck::cast_slice(&model_uniforms),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let model_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &model_bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: model_buf.as_entire_binding(),
},
],
label: Some(format!("Model{}",i).as_str()),
});
models.push(Model{
transform: modeldata.transform,
entities: modeldata.entities,
bind_group: model_bind_group,
model_buf: model_buf,
})
}
let depth_view = Self::create_depth_texture(config, device); let depth_view = Self::create_depth_texture(config, device);
@ -452,9 +520,9 @@ impl strafe_client::framework::Example for Skybox {
sky_pipeline, sky_pipeline,
entity_pipeline, entity_pipeline,
ground_pipeline, ground_pipeline,
bind_group, main_bind_group,
uniform_buf, camera_buf,
entities, models,
depth_view, depth_view,
staging_belt: wgpu::util::StagingBelt::new(0x100), staging_belt: wgpu::util::StagingBelt::new(0x100),
} }
@ -571,17 +639,28 @@ impl strafe_client::framework::Example for Skybox {
device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None }); device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
// update rotation // update rotation
let raw_uniforms = self.camera.to_uniform_data(); let camera_uniforms = self.camera.to_uniform_data();
self.staging_belt self.staging_belt
.write_buffer( .write_buffer(
&mut encoder, &mut encoder,
&self.uniform_buf, &self.camera_buf,
0, 0,
wgpu::BufferSize::new((raw_uniforms.len() * 4) as wgpu::BufferAddress).unwrap(), wgpu::BufferSize::new((camera_uniforms.len() * 4) as wgpu::BufferAddress).unwrap(),
device, device,
) )
.copy_from_slice(bytemuck::cast_slice(&raw_uniforms)); .copy_from_slice(bytemuck::cast_slice(&camera_uniforms));
// Re-upload every model's transform each frame through the staging belt;
// the copies are encoded now and execute when the encoder is submitted.
for model in self.models.iter() {
let model_uniforms = get_transform_uniform_data(&model.transform);
self.staging_belt
.write_buffer(
&mut encoder,
&model.model_buf,//destination buffer the copy targets at submit time
0,//offset into the destination buffer (not into the staging belt)
wgpu::BufferSize::new((model_uniforms.len() * 4) as wgpu::BufferAddress).unwrap(),
device,
)
.copy_from_slice(bytemuck::cast_slice(&model_uniforms));
}
self.staging_belt.finish(); self.staging_belt.finish();
{ {
@ -610,12 +689,16 @@ impl strafe_client::framework::Example for Skybox {
}), }),
}); });
rpass.set_bind_group(0, &self.bind_group, &[]); rpass.set_bind_group(0, &self.main_bind_group, &[]);
rpass.set_pipeline(&self.entity_pipeline); rpass.set_pipeline(&self.entity_pipeline);
for entity in self.entities.iter() { for model in self.models.iter() {
rpass.set_vertex_buffer(0, entity.vertex_buf.slice(..)); rpass.set_bind_group(1, &model.bind_group, &[]);
rpass.draw(0..entity.vertex_count, 0..1);
for entity in model.entities.iter() {
rpass.set_vertex_buffer(0, entity.vertex_buf.slice(..));
rpass.draw(0..entity.vertex_count, 0..1);
}
} }
rpass.set_pipeline(&self.ground_pipeline); rpass.set_pipeline(&self.ground_pipeline);
@ -634,5 +717,9 @@ impl strafe_client::framework::Example for Skybox {
} }
fn main() { fn main() {
strafe_client::framework::run::<Skybox>("Strafe Client v0.1"); strafe_client::framework::run::<Skybox>(
format!("Strafe Client v{}",
env!("CARGO_PKG_VERSION")
).as_str()
);
} }

View File

@ -67,15 +67,20 @@ struct EntityOutput {
@location(3) view: vec3<f32>, @location(3) view: vec3<f32>,
}; };
// Per-model transform uploaded from the Rust side's Model::model_buf
// (bind group 1, binding 0); applied to entity positions and normals.
@group(1)
@binding(0)
var<uniform> r_EntityTransform: mat4x4<f32>;
@vertex @vertex
fn vs_entity( fn vs_entity(
@location(0) pos: vec3<f32>, @location(0) pos: vec3<f32>,
@location(1) normal: vec3<f32>, @location(1) normal: vec3<f32>,
) -> EntityOutput { ) -> EntityOutput {
var position: vec4<f32> = r_EntityTransform * vec4<f32>(pos, 1.0);
var result: EntityOutput; var result: EntityOutput;
result.normal = normal; result.normal = (r_EntityTransform * vec4<f32>(normal, 0.0)).xyz;
result.view = pos - r_data.cam_pos.xyz; result.view = position.xyz - r_data.cam_pos.xyz;
result.position = r_data.proj * r_data.view * vec4<f32>(pos, 1.0); result.position = r_data.proj * r_data.view * position;
return result; return result;
} }