Compare commits


3 Commits

Author SHA1 Message Date
2a03987d89 fixme 2025-03-14 15:24:53 -07:00
57a4cadaf3 bsp_loader: max area triangulation 2025-03-14 15:24:53 -07:00
881bc60ab3 bsp_loader: truncate vertex precision to 16 bits
The physics algorithm expects vertices to align exactly with faces. Since each face normal is calculated via the cross product of vertex positions, truncating the vertex precision keeps the face normals exact with respect to the vertex positions.
2025-03-14 15:24:53 -07:00
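
To illustrate the idea in the commit message above, here is a minimal hedged sketch, assuming an i64 fixed-point coordinate with 32 fractional bits; the Vec3 type, constants, and function names are hypothetical and are not the repository's actual bsp_loader code. Vertex coordinates are snapped to a coarser 1/2^16 grid, after which a widened integer cross product of edge vectors yields a face normal that is exact with respect to the truncated vertices.

// Hypothetical sketch, not the repository's code: coordinate layout and names are assumed.
const TOTAL_FRAC_BITS: u32 = 32; // assumed width of the fixed-point fraction
const KEPT_FRAC_BITS: u32 = 16;  // precision retained, per the commit message

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Vec3 { x: i64, y: i64, z: i64 }

/// Zero the low fractional bits so every vertex lands on a 1/2^16 grid.
fn truncate_precision(v: Vec3) -> Vec3 {
    let mask: i64 = !((1i64 << (TOTAL_FRAC_BITS - KEPT_FRAC_BITS)) - 1);
    Vec3 { x: v.x & mask, y: v.y & mask, z: v.z & mask }
}

/// Widened integer cross product of two edge vectors; with truncated inputs the
/// resulting face normal is exact with respect to the (truncated) vertex positions.
fn face_normal(a: Vec3, b: Vec3, c: Vec3) -> [i128; 3] {
    let u = [(b.x - a.x) as i128, (b.y - a.y) as i128, (b.z - a.z) as i128];
    let v = [(c.x - a.x) as i128, (c.y - a.y) as i128, (c.z - a.z) as i128];
    [
        u[1] * v[2] - u[2] * v[1],
        u[2] * v[0] - u[0] * v[2],
        u[0] * v[1] - u[1] * v[0],
    ]
}

fn main() {
    // Example: truncate three vertices, then compute their exact face normal.
    let a = truncate_precision(Vec3 { x: 1 << 32, y: 0, z: 3 });
    let b = truncate_precision(Vec3 { x: 0, y: 1 << 32, z: 5 });
    let c = truncate_precision(Vec3 { x: 0, y: 0, z: (1i64 << 32) + 7 });
    println!("{:?}", face_normal(a, b, c));
}

The i128 widening means the cross product itself never rounds, so the only precision loss is the deliberate truncation applied to the vertices.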
102 changed files with 3234 additions and 6581 deletions

1
.gitignore vendored
View File

@@ -1,2 +1 @@
/target
.zed

2338
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -25,13 +25,6 @@ resolver = "2"
strip = true
codegen-units = 1
[workspace.lints.rust]
# unsafe_code = "forbid"
# missing_docs = "warn"
# missing_debug_implementations = "warn"
single_use_lifetimes = "warn"
trivial_casts = "warn"
unused_lifetimes = "warn"
unused_qualifications = "warn"
# variant_size_differences = "warn"
unexpected_cfgs = "warn"
[profile.dev]
strip = false
opt-level = 3

View File

@@ -9,10 +9,6 @@ ddsfile = "0.5.1"
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../physics", registry = "strafesnet" }
strafesnet_session = { path = "../session", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
wgpu = "28.0.0"
[lints]
workspace = true
wgpu = "24.0.0"

View File

@@ -5,7 +5,7 @@ use strafesnet_settings::settings;
use strafesnet_session::session;
use strafesnet_common::model::{self, ColorId, NormalId, PolygonIter, PositionId, RenderConfigId, TextureCoordinateId, VertexId};
use wgpu::{util::DeviceExt,AstcBlock,AstcChannel};
use crate::model::{self as model_graphics,IndexedGraphicsMeshOwnedRenderConfig,IndexedGraphicsMeshOwnedRenderConfigId,GraphicsMeshOwnedRenderConfig,GraphicsModelColor4,GraphicsModelOwned,GraphicsVertex,DebugGraphicsVertex};
use crate::model::{self as model_graphics,IndexedGraphicsMeshOwnedRenderConfig,IndexedGraphicsMeshOwnedRenderConfigId,GraphicsMeshOwnedRenderConfig,GraphicsModelColor4,GraphicsModelOwned,GraphicsVertex};
pub fn required_limits()->wgpu::Limits{
wgpu::Limits::default()
@@ -36,28 +36,12 @@ struct GraphicsModel{
instance_count:u32,
}
struct DebugGraphicsSubmesh{
verts:Vec<strafesnet_physics::model::MeshVertId>,
edges:Vec<Indices>,
faces:Vec<Indices>,
}
struct DebugGraphicsMesh{
verts:Indices,
submeshes:Vec<DebugGraphicsSubmesh>,
vertex_buf:wgpu::Buffer,
}
struct DebugGraphicsModel{
debug_mesh_id:u32,
bind_group:wgpu::BindGroup,
}
struct GraphicsSamplers{
repeat:wgpu::Sampler,
}
struct GraphicsBindGroupLayouts{
model:wgpu::BindGroupLayout,
debug_model:wgpu::BindGroupLayout,
}
struct GraphicsBindGroups{
@@ -68,9 +52,6 @@ struct GraphicsBindGroups{
struct GraphicsPipelines{
skybox:wgpu::RenderPipeline,
model:wgpu::RenderPipeline,
debug_face:wgpu::RenderPipeline,
debug_edge:wgpu::RenderPipeline,
debug_vert:wgpu::RenderPipeline,
}
struct GraphicsCamera{
@@ -113,7 +94,7 @@ impl GraphicsCamera{
raw
}
}
impl Default for GraphicsCamera{
impl std::default::Default for GraphicsCamera{
fn default()->Self{
Self{
screen_size:glam::UVec2::ONE,
@@ -122,26 +103,6 @@ impl Default for GraphicsCamera{
}
}
const MODEL_BUFFER_SIZE:usize=4*4 + 12 + 4;//let size=std::mem::size_of::<ModelInstance>();
const MODEL_BUFFER_SIZE_BYTES:usize=MODEL_BUFFER_SIZE*4;
fn get_instances_buffer_data(instances:&[GraphicsModelOwned])->Vec<f32>{
let mut raw=Vec::with_capacity(MODEL_BUFFER_SIZE*instances.len());
for mi in instances{
//model transform
raw.extend_from_slice(&AsRef::<[f32; 4*4]>::as_ref(&mi.transform)[..]);
//normal transform
raw.extend_from_slice(AsRef::<[f32; 3]>::as_ref(&mi.normal_transform.x_axis));
raw.extend_from_slice(&[0.0]);
raw.extend_from_slice(AsRef::<[f32; 3]>::as_ref(&mi.normal_transform.y_axis));
raw.extend_from_slice(&[0.0]);
raw.extend_from_slice(AsRef::<[f32; 3]>::as_ref(&mi.normal_transform.z_axis));
raw.extend_from_slice(&[0.0]);
//color
raw.extend_from_slice(AsRef::<[f32; 4]>::as_ref(&mi.color.get()));
}
raw
}
pub struct GraphicsState{
pipelines:GraphicsPipelines,
bind_groups:GraphicsBindGroups,
@@ -151,8 +112,6 @@ pub struct GraphicsState{
camera_buf:wgpu::Buffer,
temp_squid_texture_view:wgpu::TextureView,
models:Vec<GraphicsModel>,
debug_meshes:Vec<DebugGraphicsMesh>,
debug_models:Vec<DebugGraphicsModel>,
depth_view:wgpu::TextureView,
staging_belt:wgpu::util::StagingBelt,
}
@@ -187,90 +146,8 @@ impl GraphicsState{
self.camera.fov=user_settings.calculate_fov(1.0,&self.camera.screen_size).as_vec2();
}
pub fn generate_models(&mut self,device:&wgpu::Device,queue:&wgpu::Queue,map:&map::CompleteMap){
//generate debug meshes, each debug model refers to one
self.debug_meshes=map.meshes.iter().map(|mesh|{
let vertices:Vec<DebugGraphicsVertex>=mesh.unique_pos.iter().copied().map(|pos|{
DebugGraphicsVertex{
pos:pos.to_array().map(Into::into),
}
}).collect();
let vertex_buf=device.create_buffer_init(&wgpu::util::BufferInitDescriptor{
label:Some("Vertex"),
contents:bytemuck::cast_slice(&vertices),
usage:wgpu::BufferUsages::VERTEX,
});
macro_rules! indices{
($indices:expr)=>{
if (u32::MAX as usize)<vertices.len(){
panic!("Model has too many vertices!");
}else if (u16::MAX as usize)<vertices.len(){
Indices::new(device,&$indices.into_iter().map(|vertex_idx|vertex_idx.get() as u32).collect(),wgpu::IndexFormat::Uint32)
}else{
Indices::new(device,&$indices.into_iter().map(|vertex_idx|vertex_idx.get() as u16).collect(),wgpu::IndexFormat::Uint16)
}
};
}
let submeshes=if let Ok(physics_mesh)=strafesnet_physics::model::PhysicsMesh::try_from(mesh){
physics_mesh.submesh_views().into_iter().map(|submesh_view|DebugGraphicsSubmesh{
verts:submesh_view.verts().to_owned(),
edges:submesh_view.edge_vert_ids_iter().map(|edge_verts|indices!(edge_verts)).collect(),
faces:submesh_view.face_vert_ids_iter().map(|face_verts|{
// triangulate
let mut indices=Vec::new();
let mut poly_vertices=face_verts.into_iter();
if let (Some(a),Some(mut b))=(poly_vertices.next(),poly_vertices.next()){
for c in poly_vertices{
indices.extend([a,b,c]);
b=c;
}
}
indices!(indices)
}).collect(),
}).collect()
}else{
//idc
Vec::new()
};
DebugGraphicsMesh{
verts:indices!((0..vertices.len() as u32).map(strafesnet_physics::model::MeshVertId::new)),
submeshes,
vertex_buf,
}
}).collect();
//generate debug models, only one will be rendered at a time
self.debug_models=map.models.iter().enumerate().map(|(model_id,model)|{
let model_uniforms=get_instances_buffer_data(&[GraphicsModelOwned{
transform:model.transform.into(),
normal_transform:glam::Mat3::from_cols_array_2d(&model.transform.matrix3.to_array().map(|row|row.map(Into::into))).inverse().transpose(),
color:GraphicsModelColor4::new(glam::vec4(1.0,0.0,0.0,0.2)),
}]);
let model_buf=device.create_buffer_init(&wgpu::util::BufferInitDescriptor{
label:Some(format!("Debug Model{} Buf",model_id).as_str()),
contents:bytemuck::cast_slice(&model_uniforms),
usage:wgpu::BufferUsages::UNIFORM|wgpu::BufferUsages::COPY_DST,
});
let bind_group=device.create_bind_group(&wgpu::BindGroupDescriptor{
layout:&self.bind_group_layouts.debug_model,
entries:&[
wgpu::BindGroupEntry{
binding:0,
resource:model_buf.as_entire_binding(),
},
],
label:Some(format!("Debug Model{} Bind Group",model_id).as_str()),
});
DebugGraphicsModel{
debug_mesh_id:model.mesh.get(),
bind_group,
}
}).collect();
//generate texture view per texture
let texture_views:HashMap<model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_views:HashMap<strafesnet_common::model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_id=model::TextureId::new(texture_id as u32);
let image=match ddsfile::Dds::read(std::io::Cursor::new(texture_data)){
Ok(image)=>image,
@@ -711,21 +588,6 @@ impl GraphicsState{
},
],
});
let debug_model_bind_group_layout=device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor{
label:Some("Debug Model Bind Group Layout"),
entries:&[
wgpu::BindGroupLayoutEntry{
binding:0,
visibility:wgpu::ShaderStages::VERTEX_FRAGMENT,
ty:wgpu::BindingType::Buffer{
ty:wgpu::BufferBindingType::Uniform,
has_dynamic_offset:false,
min_binding_size:None,
},
count:None,
},
],
});
let clamp_sampler=device.create_sampler(&wgpu::SamplerDescriptor{
label:Some("Clamp Sampler"),
@@ -734,7 +596,7 @@ impl GraphicsState{
address_mode_w:wgpu::AddressMode::ClampToEdge,
mag_filter:wgpu::FilterMode::Linear,
min_filter:wgpu::FilterMode::Linear,
mipmap_filter:wgpu::MipmapFilterMode::Linear,
mipmap_filter:wgpu::FilterMode::Linear,
..Default::default()
});
let repeat_sampler=device.create_sampler(&wgpu::SamplerDescriptor{
@@ -744,7 +606,7 @@ impl GraphicsState{
address_mode_w:wgpu::AddressMode::Repeat,
mag_filter:wgpu::FilterMode::Linear,
min_filter:wgpu::FilterMode::Linear,
mipmap_filter:wgpu::MipmapFilterMode::Linear,
mipmap_filter:wgpu::FilterMode::Linear,
anisotropy_clamp:16,
..Default::default()
});
@@ -872,23 +734,15 @@ impl GraphicsState{
&skybox_texture_bind_group_layout,
&model_bind_group_layout,
],
immediate_size:0,
push_constant_ranges:&[],
});
let debug_model_pipeline_layout=device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor{
label:None,
bind_group_layouts:&[
&camera_bind_group_layout,
&debug_model_bind_group_layout,
],
push_constant_ranges:&[],
});
let sky_pipeline_layout=device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor{
label:None,
bind_group_layouts:&[
&camera_bind_group_layout,
&skybox_texture_bind_group_layout,
],
immediate_size:0,
push_constant_ranges:&[],
});
// Create the render pipelines
@@ -919,7 +773,7 @@ impl GraphicsState{
bias:wgpu::DepthBiasState::default(),
}),
multisample:wgpu::MultisampleState::default(),
multiview_mask:None,
multiview:None,
cache:None,
});
let model_pipeline=device.create_render_pipeline(&wgpu::RenderPipelineDescriptor{
@@ -929,7 +783,7 @@ impl GraphicsState{
module:&shader,
entry_point:Some("vs_entity_texture"),
buffers:&[wgpu::VertexBufferLayout{
array_stride:size_of::<GraphicsVertex>() as wgpu::BufferAddress,
array_stride:std::mem::size_of::<GraphicsVertex>() as wgpu::BufferAddress,
step_mode:wgpu::VertexStepMode::Vertex,
attributes:&wgpu::vertex_attr_array![0=>Float32x3,1=>Float32x2,2=>Float32x3,3=>Float32x4],
}],
@@ -954,57 +808,9 @@ impl GraphicsState{
bias:wgpu::DepthBiasState::default(),
}),
multisample:wgpu::MultisampleState::default(),
multiview_mask:None,
cache:None,
});
let mut debug_model_pipeline=wgpu::RenderPipelineDescriptor{
label:None,
layout:Some(&debug_model_pipeline_layout),
vertex:wgpu::VertexState{
module:&shader,
entry_point:Some("vs_debug"),
buffers:&[wgpu::VertexBufferLayout{
array_stride:size_of::<DebugGraphicsVertex>() as wgpu::BufferAddress,
step_mode:wgpu::VertexStepMode::Vertex,
attributes:&wgpu::vertex_attr_array![0=>Float32x3],
}],
compilation_options:wgpu::PipelineCompilationOptions::default(),
},
fragment:Some(wgpu::FragmentState{
module:&shader,
entry_point:Some("fs_debug"),
targets:&[Some(wgpu::ColorTargetState{
format:config.view_formats[0],
blend:Some(wgpu::BlendState::ALPHA_BLENDING),
write_mask:wgpu::ColorWrites::default(),
})],
compilation_options:wgpu::PipelineCompilationOptions::default(),
}),
primitive:wgpu::PrimitiveState{
front_face:wgpu::FrontFace::Cw,
cull_mode:Some(wgpu::Face::Front),
..Default::default()
},
depth_stencil:Some(wgpu::DepthStencilState{
format:Self::DEPTH_FORMAT,
depth_write_enabled:true,
depth_compare:wgpu::CompareFunction::Always,
stencil:wgpu::StencilState::default(),
bias:wgpu::DepthBiasState::default(),
}),
multisample:wgpu::MultisampleState::default(),
multiview:None,
cache:None,
};
debug_model_pipeline.label=Some("Debug Face Pipeline");
debug_model_pipeline.primitive.topology=wgpu::PrimitiveTopology::TriangleList;
let debug_model_pipeline_face=device.create_render_pipeline(&debug_model_pipeline);
debug_model_pipeline.label=Some("Debug Edge Pipeline");
debug_model_pipeline.primitive.topology=wgpu::PrimitiveTopology::LineList;
let debug_model_pipeline_edge=device.create_render_pipeline(&debug_model_pipeline);
debug_model_pipeline.label=Some("Debug Vert Pipeline");
debug_model_pipeline.primitive.topology=wgpu::PrimitiveTopology::PointList;
let debug_model_pipeline_vert=device.create_render_pipeline(&debug_model_pipeline);
});
let camera=GraphicsCamera::default();
let camera_uniforms=camera.to_uniform_data(glam::Vec3::ZERO,glam::Vec2::ZERO);
@@ -1044,10 +850,7 @@ impl GraphicsState{
Self{
pipelines:GraphicsPipelines{
skybox:sky_pipeline,
model:model_pipeline,
debug_face:debug_model_pipeline_face,
debug_edge:debug_model_pipeline_edge,
debug_vert:debug_model_pipeline_vert,
model:model_pipeline
},
bind_groups:GraphicsBindGroups{
camera:camera_bind_group,
@@ -1056,14 +859,9 @@ impl GraphicsState{
camera,
camera_buf,
models:Vec::new(),
debug_meshes:Vec::new(),
debug_models:Vec::new(),
depth_view,
staging_belt:wgpu::util::StagingBelt::new(device.clone(),0x100),
bind_group_layouts:GraphicsBindGroupLayouts{
model:model_bind_group_layout,
debug_model:debug_model_bind_group_layout,
},
staging_belt:wgpu::util::StagingBelt::new(0x100),
bind_group_layouts:GraphicsBindGroupLayouts{model:model_bind_group_layout},
samplers:GraphicsSamplers{repeat:repeat_sampler},
temp_squid_texture_view:squid_texture_view,
}
@@ -1100,6 +898,7 @@ impl GraphicsState{
&self.camera_buf,
0,
wgpu::BufferSize::new((camera_uniforms.len() * 4) as wgpu::BufferAddress).unwrap(),
device,
)
.copy_from_slice(bytemuck::cast_slice(&camera_uniforms));
//This code only needs to run when the uniforms change
@@ -1134,7 +933,6 @@ impl GraphicsState{
}),
store:wgpu::StoreOp::Store,
},
depth_slice:None,
})],
depth_stencil_attachment:Some(wgpu::RenderPassDepthStencilAttachment{
view:&self.depth_view,
@@ -1146,13 +944,11 @@ impl GraphicsState{
}),
timestamp_writes:Default::default(),
occlusion_query_set:Default::default(),
multiview_mask:None,
});
rpass.set_bind_group(0,&self.bind_groups.camera,&[]);
rpass.set_bind_group(1,&self.bind_groups.skybox_texture,&[]);
// Draw all models.
rpass.set_pipeline(&self.pipelines.model);
for model in &self.models{
rpass.set_bind_group(2,&model.bind_group,&[]);
@@ -1164,40 +960,6 @@ impl GraphicsState{
rpass.set_pipeline(&self.pipelines.skybox);
rpass.draw(0..3,0..1);
// render a single debug_model in red
if let Some(hit)=frame_state.hit{
if let Some(closest_fev)=&hit.closest_fev{
let model_id:model::ModelId=hit.convex_mesh_id.model_id.into();
if let Some(model)=self.debug_models.get(model_id.get() as usize){
let mesh=&self.debug_meshes[model.debug_mesh_id as usize];
rpass.set_bind_group(1,&model.bind_group,&[]);
rpass.set_vertex_buffer(0,mesh.vertex_buf.slice(..));
match closest_fev{
strafesnet_physics::model::FEV::Face(face)=>{
rpass.set_pipeline(&self.pipelines.debug_face);
let indices=&mesh.submeshes[hit.convex_mesh_id.submesh_id.get() as usize].faces[face.get() as usize];
rpass.set_index_buffer(indices.buf.slice(..),indices.format);
//TODO: loop over triangle strips
rpass.draw_indexed(0..indices.count,0,0..1);
},
strafesnet_physics::model::FEV::Edge(edge)=>{
rpass.set_pipeline(&self.pipelines.debug_edge);
let indices=&mesh.submeshes[hit.convex_mesh_id.submesh_id.get() as usize].edges[edge.get() as usize];
rpass.set_index_buffer(indices.buf.slice(..),indices.format);
rpass.draw_indexed(0..indices.count,0,0..1);
},
strafesnet_physics::model::FEV::Vert(vert)=>{
rpass.set_pipeline(&self.pipelines.debug_vert);
let indices=&mesh.verts;
rpass.set_index_buffer(indices.buf.slice(..),indices.format);
let vert_id=mesh.submeshes[hit.convex_mesh_id.submesh_id.get() as usize].verts[vert.get() as usize].get();
rpass.draw_indexed(0..1,vert_id as i32,0..1);
},
}
}
}
}
}
queue.submit(std::iter::once(encoder.finish()));
@@ -1205,3 +967,22 @@ impl GraphicsState{
self.staging_belt.recall();
}
}
const MODEL_BUFFER_SIZE:usize=4*4 + 12 + 4;//let size=std::mem::size_of::<ModelInstance>();
const MODEL_BUFFER_SIZE_BYTES:usize=MODEL_BUFFER_SIZE*4;
fn get_instances_buffer_data(instances:&[GraphicsModelOwned])->Vec<f32>{
let mut raw=Vec::with_capacity(MODEL_BUFFER_SIZE*instances.len());
for mi in instances{
//model transform
raw.extend_from_slice(&AsRef::<[f32; 4*4]>::as_ref(&mi.transform)[..]);
//normal transform
raw.extend_from_slice(AsRef::<[f32; 3]>::as_ref(&mi.normal_transform.x_axis));
raw.extend_from_slice(&[0.0]);
raw.extend_from_slice(AsRef::<[f32; 3]>::as_ref(&mi.normal_transform.y_axis));
raw.extend_from_slice(&[0.0]);
raw.extend_from_slice(AsRef::<[f32; 3]>::as_ref(&mi.normal_transform.z_axis));
raw.extend_from_slice(&[0.0]);
//color
raw.extend_from_slice(AsRef::<[f32; 4]>::as_ref(&mi.color.get()));
}
raw
}

View File

@@ -8,11 +8,6 @@ pub struct GraphicsVertex{
pub normal:[f32;3],
pub color:[f32;4],
}
#[derive(Clone,Copy,Pod,Zeroable)]
#[repr(C)]
pub struct DebugGraphicsVertex{
pub pos:[f32;3],
}
#[derive(Clone,Copy,id::Id)]
pub struct IndexedGraphicsMeshOwnedRenderConfigId(u32);
pub struct IndexedGraphicsMeshOwnedRenderConfig{

View File

@@ -8,6 +8,3 @@ arrayvec = "0.7.6"
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -18,22 +18,11 @@ impl<T> std::ops::Neg for Body<T>{
}
}
}
impl<T:Copy> std::ops::Neg for &Body<T>{
type Output=Body<T>;
fn neg(self)->Self::Output{
Body{
position:self.position,
velocity:-self.velocity,
acceleration:self.acceleration,
time:-self.time,
}
}
}
impl<T> Body<T>
where Time<T>:Copy,
{
pub const ZERO:Self=Self::new(vec3::zero(),vec3::zero(),vec3::zero(),Time::ZERO);
pub const ZERO:Self=Self::new(vec3::ZERO,vec3::ZERO,vec3::ZERO,Time::ZERO);
pub const fn new(position:Planar64Vec3,velocity:Planar64Vec3,acceleration:Planar64Vec3,time:Time<T>)->Self{
Self{
position,
@@ -107,8 +96,8 @@ impl<T> Body<T>
self.time+=dt.into();
}
pub fn infinity_dir(&self)->Option<Planar64Vec3>{
if self.velocity==vec3::zero(){
if self.acceleration==vec3::zero(){
if self.velocity==vec3::ZERO{
if self.acceleration==vec3::ZERO{
None
}else{
Some(self.acceleration)

View File

@@ -1,9 +1,7 @@
use crate::model::{into_giga_time,GigaTime,FEV,MeshQuery,DirectedEdge};
use crate::model::{GigaTime,FEV,MeshQuery,DirectedEdge};
use strafesnet_common::integer::{Fixed,Ratio,vec3::Vector3};
use crate::physics::{Time,Body};
use core::ops::Bound;
enum Transition<M:MeshQuery>{
Miss,
Next(FEV<M>,GigaTime),
@@ -21,46 +19,11 @@ impl<M:MeshQuery> CrawlResult<M>{
CrawlResult::Hit(face,time)=>Some((face,time)),
}
}
}
// TODO: move predict_collision_face_out algorithm in here or something
/// check_lower_bound
pub fn low<LhsNum,LhsDen,RhsNum,RhsDen,T>(lower_bound:&Bound<Ratio<LhsNum,LhsDen>>,dt:&Ratio<RhsNum,RhsDen>)->bool
where
RhsNum:Copy,
RhsDen:Copy,
LhsNum:Copy,
LhsDen:Copy,
LhsDen:strafesnet_common::integer::Parity,
RhsDen:strafesnet_common::integer::Parity,
LhsNum:core::ops::Mul<RhsDen,Output=T>,
LhsDen:core::ops::Mul<RhsNum,Output=T>,
T:Ord+Copy,
{
match lower_bound{
Bound::Included(time)=>time.le_ratio(*dt),
Bound::Excluded(time)=>time.lt_ratio(*dt),
Bound::Unbounded=>true,
}
}
/// check_upper_bound
pub fn upp<LhsNum,LhsDen,RhsNum,RhsDen,T>(dt:&Ratio<LhsNum,LhsDen>,upper_bound:&Bound<Ratio<RhsNum,RhsDen>>)->bool
where
RhsNum:Copy,
RhsDen:Copy,
LhsNum:Copy,
LhsDen:Copy,
LhsDen:strafesnet_common::integer::Parity,
RhsDen:strafesnet_common::integer::Parity,
LhsNum:core::ops::Mul<RhsDen,Output=T>,
LhsDen:core::ops::Mul<RhsNum,Output=T>,
T:Ord+Copy,
{
match upper_bound{
Bound::Included(time)=>dt.le_ratio(*time),
Bound::Excluded(time)=>dt.lt_ratio(*time),
Bound::Unbounded=>true,
pub fn miss(self)->Option<FEV<M>>{
match self{
CrawlResult::Miss(fev)=>Some(fev),
CrawlResult::Hit(_,_)=>None,
}
}
}
@@ -72,9 +35,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
M::Vert:Copy,
F:core::ops::Mul<Fixed<1,32>,Output=Fixed<4,128>>,
<F as core::ops::Mul<Fixed<1,32>>>::Output:core::iter::Sum,
M::Offset:core::ops::Sub<<F as std::ops::Mul<Fixed<1,32>>>::Output>,
<M as MeshQuery>::Offset:core::ops::Sub<<F as std::ops::Mul<Fixed<1,32>>>::Output>,
{
fn next_transition(&self,mesh:&M,body:&Body,lower_bound:Bound<GigaTime>,mut upper_bound:Bound<GigaTime>)->Transition<M>{
fn next_transition(&self,body_time:GigaTime,mesh:&M,body:&Body,mut best_time:GigaTime)->Transition<M>{
//conflicting derivative means it crosses in the wrong direction.
//if the transition time is equal to an already tested transition, do not replace the current best.
let mut best_transition=Transition::Miss;
@@ -87,8 +50,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//TODO: use higher precision d value?
//use the mesh transform translation instead of baking it into the d value.
for dt in Fixed::<4,128>::zeroes2((n.dot(body.position)-d)*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_transition=Transition::Hit(face_id,dt);
break;
}
@@ -102,8 +65,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//WARNING: precision is swept under the rug!
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(n.dot(body.position*2-(mesh.vert(v0)+mesh.vert(v1))).wrap_4(),n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_transition=Transition::Next(FEV::Edge(directed_edge_id.as_undirected()),dt);
break;
}
@@ -113,11 +76,10 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
},
&FEV::Edge(edge_id)=>{
//test each face collision time, ignoring roots with zero or conflicting derivative
let edge_n=mesh.edge_n(edge_id);
let edge_verts=mesh.edge_verts(edge_id);
let &[ev0,ev1]=edge_verts.as_ref();
let (v0,v1)=(mesh.vert(ev0),mesh.vert(ev1));
let edge_n=v1-v0;
let delta_pos=body.position*2-(v0+v1);
let delta_pos=body.position*2-(mesh.vert(ev0)+mesh.vert(ev1));
for (i,&edge_face_id) in mesh.edge_faces(edge_id).as_ref().iter().enumerate(){
let face_n=mesh.face_nd(edge_face_id).0;
//edge_n gets parity from the order of edge_faces
@@ -125,8 +87,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//WARNING yada yada d *2
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(n.dot(delta_pos).wrap_4(),n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_transition=Transition::Next(FEV::Face(edge_face_id),dt);
break;
}
@@ -137,9 +99,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//vertex normal gets parity from vert index
let n=edge_n*(1-2*(i as i64));
for dt in Fixed::<2,64>::zeroes2((n.dot(body.position-mesh.vert(vert_id)))*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
let dt=Ratio::new(dt.num.widen_4(),dt.den.widen_4());
upper_bound=Bound::Included(dt);
best_time=dt;
best_transition=Transition::Next(FEV::Vert(vert_id),dt);
break;
}
@@ -153,9 +115,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//edge is directed away from vertex, but we want the dot product to turn out negative
let n=-mesh.directed_edge_n(directed_edge_id);
for dt in Fixed::<2,64>::zeroes2((n.dot(body.position-mesh.vert(vert_id)))*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
let dt=Ratio::new(dt.num.widen_4(),dt.den.widen_4());
upper_bound=Bound::Included(dt);
best_time=dt;
best_transition=Transition::Next(FEV::Edge(directed_edge_id.as_undirected()),dt);
break;
}
@@ -166,13 +128,19 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
}
best_transition
}
pub fn crawl(mut self,mesh:&M,relative_body:&Body,lower_bound:Bound<&Time>,upper_bound:Bound<&Time>)->CrawlResult<M>{
let mut lower_bound=lower_bound.map(|&t|into_giga_time(t,relative_body.time));
let upper_bound=upper_bound.map(|&t|into_giga_time(t,relative_body.time));
pub fn crawl(mut self,mesh:&M,relative_body:&Body,start_time:Time,time_limit:Time)->CrawlResult<M>{
let mut body_time={
let r=(start_time-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
let time_limit={
let r=(time_limit-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
for _ in 0..20{
match self.next_transition(mesh,relative_body,lower_bound,upper_bound){
match self.next_transition(body_time,mesh,relative_body,time_limit){
Transition::Miss=>return CrawlResult::Miss(self),
Transition::Next(next_fev,next_time)=>(self,lower_bound)=(next_fev,Bound::Included(next_time)),
Transition::Next(next_fev,next_time)=>(self,body_time)=(next_fev,next_time),
Transition::Hit(face,time)=>return CrawlResult::Hit(face,time),
}
}

View File

@@ -1,8 +1,7 @@
mod body;
mod face_crawler;
pub mod model;
mod push_solve;
mod minimum_difference;
mod face_crawler;
mod model;
pub mod physics;

View File

@@ -1,913 +0,0 @@
use strafesnet_common::integer::vec3;
use strafesnet_common::integer::vec3::Vector3;
use strafesnet_common::integer::{Fixed,Planar64,Planar64Vec3};
use crate::model::{DirectedEdge,FEV,MeshQuery};
// TODO: remove mesh invert
use crate::model::{MinkowskiMesh,MinkowskiVert};
// This algorithm is based on Lua code
// written by Trey Reynolds in 2021
type Simplex<const N:usize,Vert>=[Vert;N];
#[derive(Clone,Copy)]
enum Simplex1_3<Vert>{
Simplex1(Simplex<1,Vert>),
Simplex2(Simplex<2,Vert>),
Simplex3(Simplex<3,Vert>),
}
impl<Vert> Simplex1_3<Vert>{
fn push_front(self,v:Vert)->Simplex2_4<Vert>{
match self{
Simplex1_3::Simplex1([v0])=>Simplex2_4::Simplex2([v,v0]),
Simplex1_3::Simplex2([v0,v1])=>Simplex2_4::Simplex3([v,v0,v1]),
Simplex1_3::Simplex3([v0,v1,v2])=>Simplex2_4::Simplex4([v,v0,v1,v2]),
}
}
}
#[derive(Clone,Copy)]
enum Simplex2_4<Vert>{
Simplex2(Simplex<2,Vert>),
Simplex3(Simplex<3,Vert>),
Simplex4(Simplex<4,Vert>),
}
/*
local function absDet(r, u, v, w)
if w then
return math.abs((u - r):Cross(v - r):Dot(w - r))
elseif v then
return (u - r):Cross(v - r).magnitude
elseif u then
return (u - r).magnitude
else
return 1
end
end
*/
impl<Vert> Simplex2_4<Vert>{
fn det_is_zero<M:MeshQuery<Vert=Vert>>(self,mesh:&M)->bool{
match self{
Self::Simplex4([p0,p1,p2,p3])=>{
let p0=mesh.vert(p0);
let p1=mesh.vert(p1);
let p2=mesh.vert(p2);
let p3=mesh.vert(p3);
(p1-p0).cross(p2-p0).dot(p3-p0)==Fixed::ZERO
},
Self::Simplex3([p0,p1,p2])=>{
let p0=mesh.vert(p0);
let p1=mesh.vert(p1);
let p2=mesh.vert(p2);
(p1-p0).cross(p2-p0)==vec3::zero()
},
Self::Simplex2([p0,p1])=>{
let p0=mesh.vert(p0);
let p1=mesh.vert(p1);
p1-p0==vec3::zero()
}
}
}
}
/*
local function choosePerpendicularDirection(d)
local x, y, z = d.x, d.y, d.z
local best = math.min(x*x, y*y, z*z)
if x*x == best then
return Vector3.new(y*y + z*z, -x*y, -x*z)
elseif y*y == best then
return Vector3.new(-x*y, x*x + z*z, -y*z)
else
return Vector3.new(-x*z, -y*z, x*x + y*y)
end
end
*/
fn choose_perpendicular_direction(d:Planar64Vec3)->Planar64Vec3{
let x=d.x.abs();
let y=d.y.abs();
let z=d.z.abs();
if x<y&&x<z{
Vector3::new([Fixed::ZERO,-d.z,d.y])
}else if y<z{
Vector3::new([d.z,Fixed::ZERO,-d.x])
}else{
Vector3::new([-d.y,d.x,Fixed::ZERO])
}
}
const fn choose_any_direction()->Planar64Vec3{
vec3::X
}
fn narrow_dir2(dir:Vector3<Fixed<2,64>>)->Planar64Vec3{
if dir==vec3::zero(){
return dir.narrow_1().unwrap();
}
let x=dir.x.as_bits().unsigned_abs().bits();
let y=dir.y.as_bits().unsigned_abs().bits();
let z=dir.z.as_bits().unsigned_abs().bits();
let big=x.max(y).max(z);
const MAX_BITS:u32=64+31;
if MAX_BITS<big{
dir>>(big-MAX_BITS)
}else{
dir
}.narrow_1().unwrap()
}
fn narrow_dir3(dir:Vector3<Fixed<3,96>>)->Planar64Vec3{
if dir==vec3::zero(){
return dir.narrow_1().unwrap();
}
let x=dir.x.as_bits().unsigned_abs().bits();
let y=dir.y.as_bits().unsigned_abs().bits();
let z=dir.z.as_bits().unsigned_abs().bits();
let big=x.max(y).max(z);
const MAX_BITS:u32=96+31;
if MAX_BITS<big{
dir>>(big-MAX_BITS)
}else{
dir
}.narrow_1().unwrap()
}
fn reduce1<M:MeshQuery>(
[v0]:Simplex<1,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduced<M::Vert>{
// --debug.profilebegin("reduceSimplex0")
// local a = a1 - a0
let p0=mesh.vert(v0);
// local p = -a
let p=-(p0+point);
// local direction = p
let mut dir=p;
// if direction.magnitude == 0 then
// direction = chooseAnyDirection()
if dir==vec3::zero(){
dir=choose_any_direction();
}
// return direction, a0, a1
Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
}
}
// local function reduceSimplex1(a0, a1, b0, b1)
fn reduce2<M:MeshQuery>(
[v0,v1]:Simplex<2,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduced<M::Vert>{
// --debug.profilebegin("reduceSimplex1")
// local a = a1 - a0
// local b = b1 - b0
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
// local p = -a
// local u = b - a
let p=-(p0+point);
let u=p1-p0;
// -- modify to take into account the radiuses
// local p_u = p:Dot(u)
let p_u=p.dot(u);
// if p_u >= 0 then
if !p_u.is_negative(){
// local direction = u:Cross(p):Cross(u)
let direction=u.cross(p).cross(u);
// if direction.magnitude == 0 then
if direction==vec3::zero(){
return Reduced{
dir:choose_perpendicular_direction(u),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// -- modify the direction to take into account a0R and b0R
// return direction, a0, a1, b0, b1
return Reduced{
dir:narrow_dir3(direction),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// local direction = p
let mut dir=p;
// if direction.magnitude == 0 then
if dir==vec3::zero(){
dir=choose_perpendicular_direction(u);
}
// return direction, a0, a1
Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
}
}
// local function reduceSimplex2(a0, a1, b0, b1, c0, c1)
fn reduce3<M:MeshQuery>(
[v0,mut v1,v2]:Simplex<3,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduced<M::Vert>{
// --debug.profilebegin("reduceSimplex2")
// local a = a1 - a0
// local b = b1 - b0
// local c = c1 - c0
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let p2=mesh.vert(v2);
// local p = -a
// local u = b - a
// local v = c - a
let p=-(p0+point);
let mut u=p1-p0;
let v=p2-p0;
// local uv = u:Cross(v)
// local up = u:Cross(p)
// local pv = p:Cross(v)
// local uv_up = uv:Dot(up)
// local uv_pv = uv:Dot(pv)
let mut uv=u.cross(v);
let mut up=u.cross(p);
let pv=p.cross(v);
let uv_up=uv.dot(up);
let uv_pv=uv.dot(pv);
// if uv_up >= 0 and uv_pv >= 0 then
if !uv_up.is_negative()&&!uv_pv.is_negative(){
// local uvp = uv:Dot(p)
let uvp=uv.dot(p);
// local direction = uvp < 0 and -uv or uv
let direction=if uvp.is_negative(){
-uv
}else{
uv
};
// return direction, a0, a1, b0, b1, c0, c1
return Reduced{
dir:narrow_dir2(direction),
simplex:Simplex1_3::Simplex3([v0,v1,v2]),
};
}
// local u_u = u:Dot(u)
// local v_v = v:Dot(v)
// local uDist = uv_up/(u_u*v.magnitude)
// local vDist = uv_pv/(v_v*u.magnitude)
// local minDist2 = math.min(uDist, vDist)
let u_dist=uv_up*v.length();
let v_dist=uv_pv*u.length();
// if vDist == minDist2 then
if v_dist<u_dist{
u=v;
up=-pv;
uv=-uv;
// b0 = c0
// b1 = c1
v1=v2;
}
// local p_u = p:Dot(u)
let p_u=p.dot(u);
// if p_u >= 0 then
if !p_u.is_negative(){
// local direction = up:Cross(u)
let direction=up.cross(u);
// if direction.magnitude == 0 then
if direction==vec3::zero(){
// direction = uv
return Reduced{
dir:narrow_dir2(uv),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// return direction, a0, a1, b0, b1
return Reduced{
dir:narrow_dir3(direction),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// local direction = p
let dir=p;
// if direction.magnitude == 0 then
if dir==vec3::zero(){
// direction = uv
return Reduced{
dir:narrow_dir2(uv),
simplex:Simplex1_3::Simplex1([v0]),
};
}
// return direction, a0, a0
Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
}
}
// local function reduceSimplex3(a0, a1, b0, b1, c0, c1, d0, d1)
fn reduce4<M:MeshQuery>(
[v0,mut v1,mut v2,v3]:Simplex<4,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduce<M::Vert>{
// --debug.profilebegin("reduceSimplex3")
// local a = a1 - a0
// local b = b1 - b0
// local c = c1 - c0
// local d = d1 - d0
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let p2=mesh.vert(v2);
let p3=mesh.vert(v3);
// local p = -a
// local u = b - a
// local v = c - a
// local w = d - a
let p=-(p0+point);
let mut u=p1-p0;
let mut v=p2-p0;
let w=p3-p0;
// local uv = u:Cross(v)
// local vw = v:Cross(w)
// local wu = w:Cross(u)
// local uvw = uv:Dot(w)
// local pvw = vw:Dot(p)
// local upw = wu:Dot(p)
// local uvp = uv:Dot(p)
let mut uv=u.cross(v);
let vw=v.cross(w);
let wu=w.cross(u);
let uv_w=uv.dot(w);
let pv_w=vw.dot(p);
let up_w=wu.dot(p);
let uv_p=uv.dot(p);
// if pvw/uvw >= 0 and upw/uvw >= 0 and uvp/uvw >= 0 then
if !pv_w.div_sign(uv_w).is_negative()
||!up_w.div_sign(uv_w).is_negative()
||!uv_p.div_sign(uv_w).is_negative(){
// origin is contained, this is a positive detection
// local direction = Vector3.new(0, 0, 0)
// return direction, a0, a1, b0, b1, c0, c1, d0, d1
return Reduce::Escape([v0,v1,v2,v3]);
}
// local uvwSign = uvw < 0 and -1 or uvw > 0 and 1 or 0
// local uvDist = uvp*uvwSign/uv.magnitude
// local vwDist = pvw*uvwSign/vw.magnitude
// local wuDist = upw*uvwSign/wu.magnitude
// local minDist3 = math.min(uvDist, vwDist, wuDist)
let uv_dist=uv_p.mul_sign(uv_w);
let vw_dist=pv_w.mul_sign(uv_w);
let wu_dist=up_w.mul_sign(uv_w);
let wu_len=wu.length();
let uv_len=uv.length();
let vw_len=vw.length();
if vw_dist*wu_len<wu_dist*vw_len{
// if vwDist == minDist3 then
if vw_dist*uv_len<uv_dist*vw_len{
(u,v)=(v,w);
uv=vw;
// uv_p=pv_w; // unused
// b0, c0 = c0, d0
// b1, c1 = c1, d1
(v1,v2)=(v2,v3);
}else{
v2=v3;
}
}else{
// elseif wuDist == minDist3 then
if wu_dist*uv_len<uv_dist*wu_len{
(u,v)=(w,u);
uv=wu;
// uv_p=up_w; // unused
// b0, c0 = d0, b0
// b1, c1 = d1, b1
// before [a,b,c,d]
(v1,v2)=(v3,v1);
// after [a,d,b]
}else{
v2=v3;
}
}
// local up = u:Cross(p)
// local pv = p:Cross(v)
// local uv_up = uv:Dot(up)
// local uv_pv = uv:Dot(pv)
let mut up=u.cross(p);
let pv=p.cross(v);
let uv_up=uv.dot(up);
let uv_pv=uv.dot(pv);
// if uv_up >= 0 and uv_pv >= 0 then
if !uv_up.is_negative()&&!uv_pv.is_negative(){
// local direction = uvw < 0 and uv or -uv
// return direction, a0, a1, b0, b1, c0, c1
if uv_w.is_negative(){
return Reduce::Reduced(Reduced{
dir:narrow_dir2(uv),
simplex:Simplex1_3::Simplex3([v0,v1,v2]),
});
}else{
return Reduce::Reduced(Reduced{
dir:narrow_dir2(-uv),
simplex:Simplex1_3::Simplex3([v0,v1,v2]),
});
}
}
// local u_u = u:Dot(u)
// local v_v = v:Dot(v)
// local uDist = uv_up/(u_u*v.magnitude)
// local vDist = uv_pv/(v_v*u.magnitude)
// local minDist2 = math.min(uDist, vDist)
let u_dist=uv_up*v.length();
let v_dist=uv_pv*u.length();
// if vDist == minDist2 then
if v_dist<u_dist{
u=v;
up=-pv;
uv=-uv;
// b0 = c0
// b1 = c1
v1=v2;
}
// local p_u = p:Dot(u)
let p_u=p.dot(u);
// if p_u >= 0 then
if !p_u.is_negative(){
// local direction = up:Cross(u)
let direction=up.cross(u);
// if direction.magnitude == 0 then
if direction==vec3::zero(){
// direction = uvw < 0 and uv or -uv
// return direction, a0, a1, b0, b1
if uv_w.is_negative(){
return Reduce::Reduced(Reduced{
dir:narrow_dir2(uv),
simplex:Simplex1_3::Simplex2([v0,v1]),
});
}else{
return Reduce::Reduced(Reduced{
dir:narrow_dir2(-uv),
simplex:Simplex1_3::Simplex2([v0,v1]),
});
}
}
// return direction, a0, a1, b0, b1
return Reduce::Reduced(Reduced{
dir:narrow_dir3(direction),
simplex:Simplex1_3::Simplex2([v0,v1]),
});
}
// local direction = p
let dir=p;
// if direction.magnitude == 0 then
if dir==vec3::zero(){
// direction = uvw < 0 and uv or -uv
if uv_w.is_negative(){
return Reduce::Reduced(Reduced{
dir:narrow_dir2(uv),
simplex:Simplex1_3::Simplex1([v0]),
});
}else{
return Reduce::Reduced(Reduced{
dir:narrow_dir2(-uv),
simplex:Simplex1_3::Simplex1([v0]),
});
}
}
// return direction, a0, a1
Reduce::Reduced(Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
})
}
struct Reduced<Vert>{
dir:Planar64Vec3,
simplex:Simplex1_3<Vert>,
}
enum Reduce<Vert>{
Escape(Simplex<4,Vert>),
Reduced(Reduced<Vert>),
}
impl<Vert> Simplex2_4<Vert>{
fn reduce<M:MeshQuery<Vert=Vert>>(self,mesh:&M,point:Planar64Vec3)->Reduce<Vert>{
match self{
Self::Simplex2(simplex)=>Reduce::Reduced(reduce2(simplex,mesh,point)),
Self::Simplex3(simplex)=>Reduce::Reduced(reduce3(simplex,mesh,point)),
Self::Simplex4(simplex)=>reduce4(simplex,mesh,point),
}
}
}
pub fn contains_point(mesh:&MinkowskiMesh<'_>,point:Planar64Vec3)->bool{
const ENABLE_FAST_FAIL:bool=true;
// TODO: remove mesh negation
minimum_difference::<ENABLE_FAST_FAIL,_,_>(&-mesh,point,
// on_exact
|is_intersecting,_simplex|{
is_intersecting
},
// on_escape
|_simplex|{
// intersection is guaranteed at this point
true
},
// fast_fail value
||false
)
}
//infinity fev algorithm state transition
#[derive(Debug)]
enum Transition<Vert>{
Done,//found closest vert, no edges are better
Vert(Vert),//transition to vert
}
enum EV<M:MeshQuery>{
Vert(M::Vert),
Edge(<M::Edge as DirectedEdge>::UndirectedEdge),
}
impl<M:MeshQuery> From<EV<M>> for FEV<M>{
fn from(value:EV<M>)->Self{
match value{
EV::Vert(minkowski_vert)=>FEV::Vert(minkowski_vert),
EV::Edge(minkowski_edge)=>FEV::Edge(minkowski_edge),
}
}
}
trait Contains{
fn contains(&self,point:Planar64Vec3)->bool;
}
// convenience type to check if a point is within some threshold of a plane.
struct ThickPlane{
point:Planar64Vec3,
normal:Vector3<Fixed<2,64>>,
epsilon:Fixed<3,96>,
}
impl ThickPlane{
fn new<M:MeshQuery>(mesh:&M,[v0,v1,v2]:Simplex<3,M::Vert>)->Self{
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let p2=mesh.vert(v2);
let point=p0;
let normal=(p1-p0).cross(p2-p0);
// Allow ~ 2*sqrt(3) units of thickness on the plane
// This is to account for the variance of two voxels across the longest diagonal
let epsilon=(normal.length()*(Planar64::EPSILON*3)).wrap_3();
Self{point,normal,epsilon}
}
}
impl Contains for ThickPlane{
fn contains(&self,point:Planar64Vec3)->bool{
(point-self.point).dot(self.normal).abs()<=self.epsilon
}
}
struct ThickLine{
point:Planar64Vec3,
dir:Planar64Vec3,
epsilon:Fixed<4,128>,
}
impl ThickLine{
fn new<M:MeshQuery>(mesh:&M,[v0,v1]:Simplex<2,M::Vert>)->Self{
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let point=p0;
let dir=p1-p0;
// Allow ~ 2*sqrt(3) units of thickness on the plane
// This is to account for the variance of two voxels across the longest diagonal
let epsilon=(dir.length_squared()*(Planar64::EPSILON*3)).widen_4();
Self{point,dir,epsilon}
}
}
impl Contains for ThickLine{
fn contains(&self,point:Planar64Vec3)->bool{
(point-self.point).cross(self.dir).length_squared()<=self.epsilon
}
}
struct EVFinder<'a,M,C>{
mesh:&'a M,
constraint:C,
best_distance_squared:Fixed<2,64>,
}
impl<M:MeshQuery,C:Contains> EVFinder<'_,M,C>{
fn next_transition_vert(&mut self,vert_id:M::Vert,point:Planar64Vec3)->Transition<M::Vert>{
let mut best_transition=Transition::Done;
for &directed_edge_id in self.mesh.vert_edges(vert_id).as_ref(){
//test if this edge's opposite vertex closer
let edge_verts=self.mesh.edge_verts(directed_edge_id.as_undirected());
//select opposite vertex
let test_vert_id=edge_verts.as_ref()[directed_edge_id.parity() as usize];
let test_pos=self.mesh.vert(test_vert_id);
let diff=point-test_pos;
let distance_squared=diff.dot(diff);
// ensure test_vert_id is coplanar to simplex
if distance_squared<self.best_distance_squared&&self.constraint.contains(test_pos){
best_transition=Transition::Vert(test_vert_id);
self.best_distance_squared=distance_squared;
}
}
best_transition
}
fn final_ev(&mut self,vert_id:M::Vert,point:Planar64Vec3)->EV<M>{
let mut best_transition=EV::Vert(vert_id);
let vert_pos=self.mesh.vert(vert_id);
let diff=point-vert_pos;
for &directed_edge_id in self.mesh.vert_edges(vert_id).as_ref(){
//test if this edge is closer
let edge_verts=self.mesh.edge_verts(directed_edge_id.as_undirected());
let test_vert_id=edge_verts.as_ref()[directed_edge_id.parity() as usize];
let test_pos=self.mesh.vert(test_vert_id);
let edge_n=test_pos-vert_pos;
let d=edge_n.dot(diff);
//test the edge
let edge_nn=edge_n.dot(edge_n);
// ensure edge contains closest point and directed_edge_id is coplanar to simplex
if !d.is_negative()&&d<=edge_nn&&self.constraint.contains(test_pos){
let distance_squared={
let c=diff.cross(edge_n);
//wrap for speed
(c.dot(c)/edge_nn).divide().wrap_2()
};
if distance_squared<=self.best_distance_squared{
best_transition=EV::Edge(directed_edge_id.as_undirected());
self.best_distance_squared=distance_squared;
}
}
}
best_transition
}
fn crawl_boundaries(&mut self,mut vert_id:M::Vert,point:Planar64Vec3)->EV<M>{
loop{
match self.next_transition_vert(vert_id,point){
Transition::Done=>return self.final_ev(vert_id,point),
Transition::Vert(new_vert_id)=>vert_id=new_vert_id,
}
}
}
}
/// This function drops a vertex down to an edge or a face if the path from infinity did not cross any vertex-edge boundaries but the point is supposed to have already crossed a boundary down from a vertex
fn crawl_to_closest_ev<M:MeshQuery>(mesh:&M,simplex:Simplex<2,M::Vert>,point:Planar64Vec3)->EV<M>{
// naively start at the closest vertex
// the closest vertex is not necessarily the one with the fewest boundary hops
// but it doesn't matter, we will get there regardless.
let (vert_id,best_distance_squared)=simplex.into_iter().map(|vert_id|{
let diff=point-mesh.vert(vert_id);
(vert_id,diff.dot(diff))
}).min_by_key(|&(_,d)|d).unwrap();
let constraint=ThickLine::new(mesh,simplex);
let mut finder=EVFinder{constraint,mesh,best_distance_squared};
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
finder.crawl_boundaries(vert_id,point)
}
/// This function drops a vertex down to an edge or a face if the path from infinity did not cross any vertex-edge boundaries but the point is supposed to have already crossed a boundary down from a vertex
fn crawl_to_closest_fev<'a>(mesh:&MinkowskiMesh<'a>,simplex:Simplex<3,MinkowskiVert>,point:Planar64Vec3)->FEV::<MinkowskiMesh<'a>>{
// naively start at the closest vertex
// the closest vertex is not necessarily the one with the fewest boundary hops
// but it doesn't matter, we will get there regardless.
let (vert_id,best_distance_squared)=simplex.into_iter().map(|vert_id|{
let diff=point-mesh.vert(vert_id);
(vert_id,diff.dot(diff))
}).min_by_key(|&(_,d)|d).unwrap();
let constraint=ThickPlane::new(mesh,simplex);
let mut finder=EVFinder{constraint,mesh,best_distance_squared};
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
match finder.crawl_boundaries(vert_id,point){
//if a vert is returned, it is the closest point to the infinity point
EV::Vert(vert_id)=>FEV::Vert(vert_id),
EV::Edge(edge_id)=>{
//cross to face if we are on the wrong side
let edge_n=mesh.edge_n(edge_id);
// point is multiplied by two because vert_sum sums two vertices.
let delta_pos=point*2-{
let &[v0,v1]=mesh.edge_verts(edge_id).as_ref();
mesh.vert(v0)+mesh.vert(v1)
};
for (i,&face_id) in mesh.edge_faces(edge_id).as_ref().iter().enumerate(){
//test if this face is closer
let (face_n,d)=mesh.face_nd(face_id);
//if test point is behind face, the face is invalid
// TODO: find out why I thought of this backwards
if !(face_n.dot(point)-d).is_positive(){
continue;
}
//edge-face boundary nd, n facing out of the face towards the edge
let boundary_n=face_n.cross(edge_n)*(i as i64*2-1);
let boundary_d=boundary_n.dot(delta_pos);
//is test point behind edge, i.e. contained in the face
if !boundary_d.is_positive(){
//both faces cannot pass this condition, return early if one does.
return FEV::Face(face_id);
}
}
FEV::Edge(edge_id)
},
}
}
pub fn closest_fev_not_inside<'a>(mesh:&MinkowskiMesh<'a>,point:Planar64Vec3)->Option<FEV<MinkowskiMesh<'a>>>{
const ENABLE_FAST_FAIL:bool=false;
// TODO: remove mesh negation
minimum_difference::<ENABLE_FAST_FAIL,_,_>(&-mesh,point,
// on_exact
|is_intersecting,simplex|{
if is_intersecting{
return None;
}
// Convert simplex to FEV
// Vertices must be inverted since the mesh is inverted
Some(match simplex{
Simplex1_3::Simplex1([v0])=>FEV::Vert(-v0),
Simplex1_3::Simplex2([v0,v1])=>{
// invert
let (v0,v1)=(-v0,-v1);
let ev=crawl_to_closest_ev(mesh,[v0,v1],point);
if !matches!(ev,EV::Edge(_)){
println!("I can't believe it's not an edge!");
}
ev.into()
},
Simplex1_3::Simplex3([v0,v1,v2])=>{
// invert
let (v0,v1,v2)=(-v0,-v1,-v2);
// Shimmy to the side until you find a face that contains the closest point
// it's ALWAYS representable as a face, but this algorithm may
// return E or V in edge cases but I don't think that will break the face crawler
let fev=crawl_to_closest_fev(mesh,[v0,v1,v2],point);
if !matches!(fev,FEV::Face(_)){
println!("I can't believe it's not a face!");
}
fev
},
})
},
// on_escape
|_simplex|{
// intersection is guaranteed at this point
// local norm, dist, u0, u1, v0, v1, w0, w1 = expand(queryP, queryQ, a0, a1, b0, b1, c0, c1, d0, d1, 1e-5)
// let simplex=refine_to_exact(mesh,simplex);
None
},
// fast_fail value is irrelevant and will never be returned!
||unreachable!()
)
}
// local function minimumDifference(
// queryP, radiusP,
// queryQ, radiusQ,
// exitRadius, testIntersection
// )
fn minimum_difference<const ENABLE_FAST_FAIL:bool,T,M:MeshQuery>(
mesh:&M,
point:Planar64Vec3,
on_exact:impl FnOnce(bool,Simplex1_3<M::Vert>)->T,
on_escape:impl FnOnce(Simplex<4,M::Vert>)->T,
on_fast_fail:impl FnOnce()->T,
)->T{
// local initialAxis = queryQ() - queryP()
// local new_point_p = queryP(initialAxis)
// local new_point_q = queryQ(-initialAxis)
// local direction, a0, a1, b0, b1, c0, c1, d0, d1
let mut initial_axis=mesh.hint_point()+point;
// degenerate case
if initial_axis==vec3::zero(){
initial_axis=choose_any_direction();
}
let last_point=mesh.farthest_vert(-initial_axis);
// this represents the 'a' value in the commented code
let mut last_pos=mesh.vert(last_point);
let Reduced{dir:mut direction,simplex:mut simplex_small}=reduce1([last_point],mesh,point);
// exitRadius = testIntersection and 0 or exitRadius or 1/0
// for _ = 1, 100 do
loop{
// new_point_p = queryP(-direction)
// new_point_q = queryQ(direction)
// local next_point = new_point_q - new_point_p
let next_point=mesh.farthest_vert(direction);
let next_pos=mesh.vert(next_point);
// if -direction:Dot(next_point) > (exitRadius + radiusP + radiusQ)*direction.magnitude then
if ENABLE_FAST_FAIL&&direction.dot(next_pos+point).is_negative(){
return on_fast_fail();
}
let simplex_big=simplex_small.push_front(next_point);
// if
// direction:Dot(next_point - a) <= 0 or
// absDet(next_point, a, b, c) < 1e-6
if !direction.dot(next_pos-last_pos).is_positive()
||simplex_big.det_is_zero(mesh){
// Found enough information to compute the exact closest point.
// local norm = direction.unit
// local dist = a:Dot(norm)
// local hits = -dist < radiusP + radiusQ
let is_intersecting=(last_pos+point).dot(direction).is_positive();
return on_exact(is_intersecting,simplex_small);
}
// direction, a0, a1, b0, b1, c0, c1, d0, d1 = reduceSimplex(new_point_p, new_point_q, a0, a1, b0, b1, c0, c1)
match simplex_big.reduce(mesh,point){
// if a and b and c and d then
Reduce::Escape(simplex)=>{
// Enough information to conclude that the meshes are intersecting.
// Topology information is computed if needed.
return on_escape(simplex);
},
Reduce::Reduced(reduced)=>{
direction=reduced.dir;
simplex_small=reduced.simplex;
},
}
// next loop this will be a
last_pos=next_pos;
}
}
#[cfg(test)]
mod test{
use super::*;
use crate::model::{PhysicsMesh,PhysicsMeshView};
fn mesh_contains_point(mesh:PhysicsMeshView<'_>,point:Planar64Vec3)->bool{
const ENABLE_FAST_FAIL:bool=true;
// TODO: remove mesh negation
minimum_difference::<ENABLE_FAST_FAIL,_,_>(&mesh,point,
// on_exact
|is_intersecting,_simplex|{
is_intersecting
},
// on_escape
|_simplex|{
// intersection is guaranteed at this point
true
},
// fast_fail value
||false
)
}
#[test]
fn test_cube_points(){
let mesh=PhysicsMesh::unit_cube();
let mesh_view=mesh.complete_mesh_view();
for x in -2..=2{
for y in -2..=2{
for z in -2..=2{
let point=vec3::int(x,y,z)>>1;
assert!(mesh_contains_point(mesh_view,point),"Mesh did not contain point {point}");
}
}
}
}
}

View File

@@ -1,5 +1,5 @@
use std::collections::{HashSet,HashMap};
use core::ops::{Bound,RangeBounds};
use core::ops::Range;
use strafesnet_common::integer::vec3::Vector3;
use strafesnet_common::model::{self,MeshId,PolygonIter};
use strafesnet_common::integer::{self,vec3,Fixed,Planar64,Planar64Vec3,Ratio};
@@ -60,7 +60,7 @@ impl DirectedEdge for SubmeshDirectedEdgeId{
}
//Vertex <-> Edge <-> Face -> Collide
#[derive(Debug,Clone)]
#[derive(Debug)]
pub enum FEV<M:MeshQuery>{
Face(M::Face),
Edge(<M::Edge as DirectedEdge>::UndirectedEdge),
@@ -68,12 +68,11 @@ pub enum FEV<M:MeshQuery>{
}
//use Unit32 #[repr(C)] for map files
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
#[derive(Clone,Hash,Eq,PartialEq)]
struct Face{
normal:Planar64Vec3,
dot:Planar64,
}
#[derive(Debug)]
struct Vert(Planar64Vec3);
pub trait MeshQuery{
type Face:Copy;
@@ -90,10 +89,6 @@ pub trait MeshQuery{
let &[v0,v1]=self.edge_verts(directed_edge_id.as_undirected()).as_ref();
(self.vert(v1)-self.vert(v0))*((directed_edge_id.parity() as i64)*2-1)
}
/// This must return a point inside the mesh.
#[expect(dead_code)]
fn hint_point(&self)->Planar64Vec3;
fn farthest_vert(&self,dir:Planar64Vec3)->Self::Vert;
fn vert(&self,vert_id:Self::Vert)->Planar64Vec3;
fn face_nd(&self,face_id:Self::Face)->(Self::Normal,Self::Offset);
fn face_edges(&self,face_id:Self::Face)->impl AsRef<[Self::Edge]>;
@@ -102,26 +97,18 @@ pub trait MeshQuery{
fn vert_edges(&self,vert_id:Self::Vert)->impl AsRef<[Self::Edge]>;
fn vert_faces(&self,vert_id:Self::Vert)->impl AsRef<[Self::Face]>;
}
#[derive(Debug)]
struct FaceRefs{
// I didn't write it down, but I assume the edges are directed
// clockwise when looking towards the face normal, i.e. right hand rule.
edges:Vec<SubmeshDirectedEdgeId>,
//verts are redundant, use edge[i].verts[0]
//verts:Vec<VertId>,
}
#[derive(Debug)]
struct EdgeRefs{
faces:[SubmeshFaceId;2],//left, right
verts:[SubmeshVertId;2],//start, end
verts:[SubmeshVertId;2],//bottom, top
}
#[derive(Debug)]
struct VertRefs{
faces:Vec<SubmeshFaceId>,
// edges are always directed away from the vert
edges:Vec<SubmeshDirectedEdgeId>,
}
#[derive(Debug)]
pub struct PhysicsMeshData{
//this contains all real and virtual faces used in both the complete mesh and convex submeshes
//faces are sorted such that all faces that belong to the complete mesh appear first, and then
@@ -131,7 +118,6 @@ pub struct PhysicsMeshData{
faces:Vec<Face>,//MeshFaceId indexes this list
verts:Vec<Vert>,//MeshVertId indexes this list
}
#[derive(Debug)]
pub struct PhysicsMeshTopology{
//mapping of local ids to PhysicsMeshData ids
faces:Vec<MeshFaceId>,//SubmeshFaceId indexes this list
@@ -157,12 +143,10 @@ impl From<MeshId> for PhysicsMeshId{
pub struct PhysicsSubmeshId(u32);
pub struct PhysicsMesh{
data:PhysicsMeshData,
// The complete mesh is unused at this time.
// complete_mesh:PhysicsMeshTopology,
// Submeshes are guaranteed to be convex and may contain
// "virtual" faces which are not part of the complete mesh.
// Physics calculations should never resolve to hitting
// a virtual face.
complete_mesh:PhysicsMeshTopology,
//Most objects in roblox maps are already convex, so the list length is 0
//as soon as the mesh is divided into 2 submeshes, the list length jumps to 2.
//length 1 is unnecessary since the complete mesh would be a duplicate of the only submesh, but would still function properly
submeshes:Vec<PhysicsMeshTopology>,
}
impl PhysicsMesh{
@@ -226,24 +210,19 @@ impl PhysicsMesh{
};
Self{
data,
// complete_mesh:mesh_topology.clone(),
submeshes:vec![mesh_topology],
complete_mesh:mesh_topology,
submeshes:Vec::new(),
}
}
pub fn unit_cylinder()->Self{
Self::unit_cube()
}
#[inline]
pub fn complete_mesh(&self)->&PhysicsMeshTopology{
// If there is exactly one submesh, then the complete mesh is identical to it.
if self.submeshes.len()==1{
self.submeshes.first().unwrap()
}else{
panic!("PhysicsMesh complete mesh is not known");
}
pub const fn complete_mesh(&self)->&PhysicsMeshTopology{
&self.complete_mesh
}
#[inline]
pub fn complete_mesh_view(&self)->PhysicsMeshView<'_>{
pub const fn complete_mesh_view(&self)->PhysicsMeshView{
PhysicsMeshView{
data:&self.data,
topology:self.complete_mesh(),
@@ -251,16 +230,21 @@ impl PhysicsMesh{
}
#[inline]
pub fn submeshes(&self)->&[PhysicsMeshTopology]{
&self.submeshes
//the complete mesh is already a convex mesh when len()==0, len()==1 is invalid but will still work
if self.submeshes.len()==0{
std::slice::from_ref(&self.complete_mesh)
}else{
&self.submeshes.as_slice()
}
}
#[inline]
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView<'_>{
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView{
PhysicsMeshView{
data:&self.data,
topology:&self.submeshes()[submesh_id.get() as usize],
}
}
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView<'_>>{
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView>{
self.submeshes().iter().map(|topology|PhysicsMeshView{
data:&self.data,
topology,
@@ -329,20 +313,17 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
if mesh.unique_pos.len()==0{
return Err(PhysicsMeshError::ZeroVertices);
}
// An empty physics mesh is a waste of resources
if mesh.physics_groups.len()==0{
return Err(PhysicsMeshError::NoPhysicsGroups);
}
let verts=mesh.unique_pos.iter().copied().map(Vert).collect();
// TODO: do not hash faces to get face id
// meshes can have multiple identical nd representations while still being distinct faces,
// especially when the complete mesh is a non-convex mesh.
//TODO: fix submeshes
//flat map mesh.physics_groups[$1].groups.polys()[$2] as face_id
//lower face_id points to upper face_id
//the same face is not allowed to be in multiple polygon groups
// because SubmeshFaceId -> CompleteMeshFaceId -> SubmeshFaceId is ambiguous
// when multiple SubmeshFaceId point to one MeshFaceId
let mut faces=Vec::new();
let mut face_id_from_face=HashMap::new();
let mesh_topologies:Vec<PhysicsMeshTopology>=mesh.physics_groups.iter().map(|physics_group|{
let mut mesh_topologies:Vec<PhysicsMeshTopology>=mesh.physics_groups.iter().map(|physics_group|{
//construct submesh
let mut submesh_faces=Vec::new();//these contain a map from submeshId->meshId
let mut submesh_verts=Vec::new();
@@ -403,11 +384,15 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
normal:(normal/len as i64).divide().narrow_1().unwrap(),
dot:(dot/(len*len) as i64).narrow_1().unwrap(),
};
let face_id=*face_id_from_face.entry(face).or_insert_with(||{
let face_id=MeshFaceId::new(faces.len() as u32);
faces.push(face);
face_id
});
let face_id=match face_id_from_face.get(&face){
Some(&face_id)=>face_id,
None=>{
let face_id=MeshFaceId::new(faces.len() as u32);
face_id_from_face.insert(face.clone(),face_id);
faces.push(face);
face_id
}
};
submesh_faces.push(face_id);
face_ref_guys.push(FaceRefEdges(face_edges));
}
@@ -415,16 +400,16 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
PhysicsMeshTopology{
faces:submesh_faces,
verts:submesh_verts,
face_topology:face_ref_guys.into_iter().map(|FaceRefEdges(edges)|{
FaceRefs{edges}
face_topology:face_ref_guys.into_iter().map(|face_ref_guy|{
FaceRefs{edges:face_ref_guy.0}
}).collect(),
edge_topology:edge_pool.edge_guys.into_iter().map(|(EdgeRefVerts(verts),EdgeRefFaces(faces))|
EdgeRefs{faces,verts}
edge_topology:edge_pool.edge_guys.into_iter().map(|(edge_ref_verts,edge_ref_faces)|
EdgeRefs{faces:edge_ref_faces.0,verts:edge_ref_verts.0}
).collect(),
vert_topology:vert_ref_guys.into_iter().map(|VertRefGuy{edges,faces}|
vert_topology:vert_ref_guys.into_iter().map(|vert_ref_guy|
VertRefs{
edges:edges.into_iter().collect(),
faces:faces.into_iter().collect(),
edges:vert_ref_guy.edges.into_iter().collect(),
faces:vert_ref_guy.faces.into_iter().collect(),
}
).collect(),
}
@@ -434,35 +419,16 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
faces,
verts,
},
// complete_mesh:None,
complete_mesh:mesh_topologies.pop().ok_or(PhysicsMeshError::NoPhysicsGroups)?,
submeshes:mesh_topologies,
})
}
}
#[derive(Debug,Clone,Copy)]
pub struct PhysicsMeshView<'a>{
data:&'a PhysicsMeshData,
topology:&'a PhysicsMeshTopology,
}
impl PhysicsMeshView<'_>{
pub fn verts(&self)->&[MeshVertId]{
&self.topology.verts
}
pub fn edge_vert_ids_iter(&self)->impl Iterator<Item=[MeshVertId;2]>+'_{
self.topology.edge_topology.iter().map(|edge|{
edge.verts.map(|vert_id|self.topology.verts[vert_id.get() as usize])
})
}
pub fn face_vert_ids_iter(&self)->impl Iterator<Item=impl Iterator<Item=MeshVertId>>+'_{
self.topology.face_topology.iter().map(|face|{
face.edges.iter().map(|edge_id|{
let vert_id=self.topology.edge_topology[edge_id.as_undirected().get() as usize].verts[edge_id.parity() as usize];
self.topology.verts[vert_id.get() as usize]
})
})
}
}
impl MeshQuery for PhysicsMeshView<'_>{
type Face=SubmeshFaceId;
type Edge=SubmeshDirectedEdgeId;
@@ -473,22 +439,6 @@ impl MeshQuery for PhysicsMeshView<'_>{
let face_idx=self.topology.faces[face_id.get() as usize].get() as usize;
(self.data.faces[face_idx].normal,self.data.faces[face_idx].dot)
}
fn hint_point(&self)->Planar64Vec3{
// invariant: meshes always encompass the origin
vec3::zero()
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual vertices
SubmeshVertId::new(
self.topology.verts.iter()
.enumerate()
.max_by_key(|&(_,&vert_id)|
dir.dot(self.data.verts[vert_id.get() as usize].0)
)
//assume there is more than zero vertices.
.unwrap().0 as u32
)
}
//ideally I never calculate the vertex position, but I have to for the graphical meshes...
fn vert(&self,vert_id:SubmeshVertId)->Planar64Vec3{
let vert_idx=self.topology.verts[vert_id.get() as usize].get() as usize;
@@ -511,7 +461,6 @@ impl MeshQuery for PhysicsMeshView<'_>{
}
}
#[derive(Debug)]
pub struct PhysicsMeshTransform{
pub vertex:integer::Planar64Affine3,
pub normal:integer::mat3::Matrix3<Fixed<2,64>>,
@@ -527,7 +476,6 @@ impl PhysicsMeshTransform{
}
}
#[derive(Debug,Clone,Copy)]
pub struct TransformedMesh<'a>{
view:PhysicsMeshView<'a>,
transform:&'a PhysicsMeshTransform,
@@ -542,12 +490,24 @@ impl TransformedMesh<'_>{
transform,
}
}
pub fn verts<'a>(&'a self)->impl Iterator<Item=Vector3<Fixed<2,64>>>+'a{
pub fn verts<'a>(&'a self)->impl Iterator<Item=vec3::Vector3<Fixed<2,64>>>+'a{
self.view.data.verts.iter().map(|&Vert(pos)|self.transform.vertex.transform_point3(pos))
}
pub fn faces(&self)->impl Iterator<Item=SubmeshFaceId>{
(0..self.view.topology.faces.len() as u32).map(SubmeshFaceId::new)
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual vertices
SubmeshVertId::new(
self.view.topology.verts.iter()
.enumerate()
.max_by_key(|&(_,&vert_id)|
dir.dot(self.transform.vertex.transform_point3(self.view.data.verts[vert_id.get() as usize].0))
)
//assume there is more than zero vertices.
.unwrap().0 as u32
)
}
}
impl MeshQuery for TransformedMesh<'_>{
type Face=SubmeshFaceId;
@@ -565,21 +525,6 @@ impl MeshQuery for TransformedMesh<'_>{
// wrap for speed
self.transform.vertex.transform_point3(self.view.vert(vert_id)).wrap_1()
}
fn hint_point(&self)->Planar64Vec3{
self.transform.vertex.translation
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual vertices
SubmeshVertId::new(
self.view.topology.verts.iter()
.enumerate()
.max_by_key(|&(_,&vert_id)|
dir.dot(self.transform.vertex.transform_point3(self.view.data.verts[vert_id.get() as usize].0))
)
//assume there is at least one vertex.
.unwrap().0 as u32
)
}
#[inline]
fn face_edges(&self,face_id:SubmeshFaceId)->impl AsRef<[SubmeshDirectedEdgeId]>{
self.view.face_edges(face_id)
@@ -606,20 +551,11 @@ impl MeshQuery for TransformedMesh<'_>{
//(face,vertex)
//(edge,edge)
//(vertex,face)
#[derive(Clone,Copy,Debug,Eq,PartialEq)]
#[derive(Clone,Copy,Debug)]
pub enum MinkowskiVert{
VertVert(SubmeshVertId,SubmeshVertId),
}
// TODO: remove this
impl core::ops::Neg for MinkowskiVert{
type Output=Self;
fn neg(self)->Self::Output{
match self{
MinkowskiVert::VertVert(v0,v1)=>MinkowskiVert::VertVert(v1,v0),
}
}
}
#[derive(Clone,Copy,Debug,Eq,PartialEq)]
#[derive(Clone,Copy,Debug)]
pub enum MinkowskiEdge{
VertEdge(SubmeshVertId,SubmeshEdgeId),
EdgeVert(SubmeshEdgeId,SubmeshVertId),
@@ -634,7 +570,7 @@ impl UndirectedEdge for MinkowskiEdge{
}
}
}
#[derive(Clone,Copy,Debug,Eq,PartialEq)]
#[derive(Clone,Copy,Debug)]
pub enum MinkowskiDirectedEdge{
VertEdge(SubmeshVertId,SubmeshDirectedEdgeId),
EdgeVert(SubmeshDirectedEdgeId,SubmeshVertId),
@@ -655,7 +591,7 @@ impl DirectedEdge for MinkowskiDirectedEdge{
}
}
}
#[derive(Clone,Copy,Debug,Hash)]
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
pub enum MinkowskiFace{
VertFace(SubmeshVertId,SubmeshFaceId),
EdgeEdge(SubmeshEdgeId,SubmeshEdgeId,bool),
@@ -665,25 +601,23 @@ pub enum MinkowskiFace{
//FaceFace
}
#[derive(Debug)]
pub struct MinkowskiMesh<'a>{
mesh0:TransformedMesh<'a>,
mesh1:TransformedMesh<'a>,
}
pub type GigaTime=Ratio<Fixed<4,128>,Fixed<4,128>>;
pub fn into_giga_time(time:Time,relative_to:Time)->GigaTime{
let r=(time-relative_to).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
//infinity fev algorithm state transition
#[derive(Debug)]
enum Transition{
Done,//found closest vert, no edges are better
Vert(MinkowskiVert),//transition to vert
}
enum EV{
Vert(MinkowskiVert),
Edge(MinkowskiEdge),
}
// TODO: remove this
impl<'a> core::ops::Neg for &MinkowskiMesh<'a>{
type Output=MinkowskiMesh<'a>;
fn neg(self)->Self::Output{
MinkowskiMesh::minkowski_sum(self.mesh1,self.mesh0)
}
}
pub type GigaTime=Ratio<Fixed<4,128>,Fixed<4,128>>;
impl MinkowskiMesh<'_>{
pub fn minkowski_sum<'a>(mesh0:TransformedMesh<'a>,mesh1:TransformedMesh<'a>)->MinkowskiMesh<'a>{
@@ -692,35 +626,151 @@ impl MinkowskiMesh<'_>{
mesh1,
}
}
pub fn predict_collision_in(&self,relative_body:&Body,range:impl RangeBounds<Time>)->Option<(MinkowskiFace,GigaTime)>{
let fev=crate::minimum_difference::closest_fev_not_inside(self,relative_body.position)?;
//continue forwards along the body parabola
fev.crawl(self,relative_body,range.start_bound(),range.end_bound()).hit()
fn farthest_vert(&self,dir:Planar64Vec3)->MinkowskiVert{
MinkowskiVert::VertVert(self.mesh0.farthest_vert(dir),self.mesh1.farthest_vert(-dir))
}
pub fn predict_collision_out(&self,relative_body:&Body,range:impl RangeBounds<Time>)->Option<(MinkowskiFace,GigaTime)>{
let (lower_bound,upper_bound)=(range.start_bound(),range.end_bound());
// TODO: handle unbounded collision using infinity fev
let time=match upper_bound{
Bound::Included(&time)=>time,
Bound::Excluded(&time)=>time,
Bound::Unbounded=>unimplemented!("unbounded collision out"),
fn next_transition_vert(&self,vert_id:MinkowskiVert,best_distance_squared:&mut Fixed<2,64>,infinity_dir:Planar64Vec3,point:Planar64Vec3)->Transition{
let mut best_transition=Transition::Done;
for &directed_edge_id in self.vert_edges(vert_id).as_ref(){
let edge_n=self.directed_edge_n(directed_edge_id);
//is boundary uncrossable by a crawl from infinity
let edge_verts=self.edge_verts(directed_edge_id.as_undirected());
//select opposite vertex
let test_vert_id=edge_verts.as_ref()[directed_edge_id.parity() as usize];
//test if it's closer
let diff=point-self.vert(test_vert_id);
if edge_n.dot(infinity_dir).is_zero(){
let distance_squared=diff.dot(diff);
if distance_squared<*best_distance_squared{
best_transition=Transition::Vert(test_vert_id);
*best_distance_squared=distance_squared;
}
}
}
best_transition
}
fn final_ev(&self,vert_id:MinkowskiVert,best_distance_squared:&mut Fixed<2,64>,infinity_dir:Planar64Vec3,point:Planar64Vec3)->EV{
let mut best_transition=EV::Vert(vert_id);
let diff=point-self.vert(vert_id);
for &directed_edge_id in self.vert_edges(vert_id).as_ref(){
let edge_n=self.directed_edge_n(directed_edge_id);
//is boundary uncrossable by a crawl from infinity
//check if time of collision is outside Time::MIN..Time::MAX
if edge_n.dot(infinity_dir).is_zero(){
let d=edge_n.dot(diff);
//test the edge
let edge_nn=edge_n.dot(edge_n);
if !d.is_negative()&&d<=edge_nn{
let distance_squared={
let c=diff.cross(edge_n);
//wrap for speed
(c.dot(c)/edge_nn).divide().wrap_2()
};
if distance_squared<=*best_distance_squared{
best_transition=EV::Edge(directed_edge_id.as_undirected());
*best_distance_squared=distance_squared;
}
}
}
}
best_transition
}
fn crawl_boundaries(&self,mut vert_id:MinkowskiVert,infinity_dir:Planar64Vec3,point:Planar64Vec3)->EV{
let mut best_distance_squared={
let diff=point-self.vert(vert_id);
diff.dot(diff)
};
let fev=crate::minimum_difference::closest_fev_not_inside(self,relative_body.extrapolated_position(time))?;
// swap and negate bounds to do a time inversion
let (lower_bound,upper_bound)=(upper_bound.map(|&t|-t),lower_bound.map(|&t|-t));
let infinity_body=-relative_body;
//continue backwards along the body parabola
fev.crawl(self,&infinity_body,lower_bound.as_ref(),upper_bound.as_ref()).hit()
//no need to test -time<time_limit because of the first step
.map(|(face,time)|(face,-time))
loop{
match self.next_transition_vert(vert_id,&mut best_distance_squared,infinity_dir,point){
Transition::Done=>return self.final_ev(vert_id,&mut best_distance_squared,infinity_dir,point),
Transition::Vert(new_vert_id)=>vert_id=new_vert_id,
}
}
}
pub fn predict_collision_face_out(&self,relative_body:&Body,range:impl RangeBounds<Time>,contact_face_id:MinkowskiFace)->Option<(MinkowskiDirectedEdge,GigaTime)>{
// TODO: make better
use crate::face_crawler::{low,upp};
/// This function drops a vertex down to an edge or a face when the path from infinity did not cross any vertex-edge boundaries, but the point has already crossed a boundary down from a vertex
fn infinity_fev(&self,infinity_dir:Planar64Vec3,point:Planar64Vec3)->FEV::<MinkowskiMesh>{
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
match self.crawl_boundaries(self.farthest_vert(infinity_dir),infinity_dir,point){
//if a vert is returned, it is the closest point to the infinity point
EV::Vert(vert_id)=>FEV::Vert(vert_id),
EV::Edge(edge_id)=>{
//cross to face if the boundary is not crossable and we are on the wrong side
let edge_n=self.edge_n(edge_id);
// point is multiplied by two because vert_sum sums two vertices.
let delta_pos=point*2-{
let &[v0,v1]=self.edge_verts(edge_id).as_ref();
self.vert(v0)+self.vert(v1)
};
for (i,&face_id) in self.edge_faces(edge_id).as_ref().iter().enumerate(){
let face_n=self.face_nd(face_id).0;
//edge-face boundary nd, n facing out of the face towards the edge
let boundary_n=face_n.cross(edge_n)*(i as i64*2-1);
let boundary_d=boundary_n.dot(delta_pos);
//check if time of collision is outside Time::MIN..Time::MAX
//infinity_dir can always be treated as a velocity
if !boundary_d.is_positive()&&boundary_n.dot(infinity_dir).is_zero(){
//both faces cannot pass this condition, return early if one does.
return FEV::Face(face_id);
}
}
FEV::Edge(edge_id)
},
}
}
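The `point*2` here compensates for comparing against a vertex sum rather than a midpoint: `delta_pos` equals `2*(point - midpoint(v0,v1))`, which keeps the test in integers while preserving the sign of the boundary dot product. A standalone integer illustration of the same trick (names are illustrative only, not from the codebase):
// Illustration only: test which side of an edge-face boundary a point is on,
// without dividing the vertex sum by two. Doubling both sides keeps everything
// in integers and preserves the sign of the dot product.
fn boundary_side(point: [i64; 3], v0: [i64; 3], v1: [i64; 3], boundary_n: [i64; 3]) -> i64 {
    // delta_pos = 2*point - (v0 + v1) == 2*(point - midpoint(v0, v1))
    let delta_pos = [
        2 * point[0] - (v0[0] + v1[0]),
        2 * point[1] - (v0[1] + v1[1]),
        2 * point[2] - (v0[2] + v1[2]),
    ];
    // sign matches boundary_n . (point - midpoint); only the sign is used by the caller
    boundary_n[0] * delta_pos[0] + boundary_n[1] * delta_pos[1] + boundary_n[2] * delta_pos[2]
}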
// TODO: fundamentally improve this algorithm.
// All it needs to do is find the closest point on the mesh
// and return the FEV which the point resides on.
//
// What it actually does is use the above functions to trace a ray in from infinity,
// crawling the closest point along the mesh surface until the ray reaches
// the starting point to discover the final FEV.
//
// The actual collision prediction probably does a single test
// and then immediately returns with 0 FEV transitions on average,
// because of the strict time_limit constraint.
//
// Most of the calculation time is just calculating the starting point
// for the "actual" crawling algorithm below (predict_collision_{in|out}).
fn closest_fev_not_inside(&self,mut infinity_body:Body,start_time:Time)->Option<FEV<MinkowskiMesh>>{
infinity_body.infinity_dir().and_then(|dir|{
let infinity_fev=self.infinity_fev(-dir,infinity_body.position);
//a line is simpler to solve than a parabola
infinity_body.velocity=dir;
infinity_body.acceleration=vec3::ZERO;
//crawl in from negative infinity along a tangent line to get the closest fev
// TODO: change crawl_fev args to delta time? Optional values?
infinity_fev.crawl(self,&infinity_body,Time::MIN/4,start_time).miss()
})
}
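As context for the comment above, the crawl it describes has a simple shape: repeatedly hop to whichever neighbouring face/edge/vertex is closer to the target, and stop when no neighbour improves. A conceptual sketch with assumed names (`Fev`, `closer_neighbour`), not the crate's actual `FEV`/`crawl` API:
// Conceptual sketch of a face/edge/vertex crawl toward a query target.
// Each step moves to an adjacent feature that is closer, terminating when
// no neighbour improves on the current feature.
enum Fev { Face(usize), Edge(usize), Vert(usize) }

fn crawl(mut at: Fev, closer_neighbour: impl Fn(&Fev) -> Option<Fev>) -> Fev {
    loop {
        match closer_neighbour(&at) {
            Some(next) => at = next, // crossed a boundary toward the target
            None => return at,       // locally closest feature found
        }
    }
}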
pub fn predict_collision_in(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>)->Option<(MinkowskiFace,GigaTime)>{
self.closest_fev_not_inside(relative_body.clone(),start_time).and_then(|fev|{
//continue forwards along the body parabola
fev.crawl(self,relative_body,start_time,time_limit).hit()
})
}
pub fn predict_collision_out(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>)->Option<(MinkowskiFace,GigaTime)>{
//create an extrapolated body at time_limit
let infinity_body=-relative_body.clone();
self.closest_fev_not_inside(infinity_body,-time_limit).and_then(|fev|{
//continue backwards along the body parabola
fev.crawl(self,&infinity_body,-time_limit,-start_time).hit()
//no need to test -time<time_limit because of the first step
.map(|(face,time)|(face,-time))
})
}
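The backwards search leans on a time inversion: assuming `-body` negates velocity and the time reference while keeping position and acceleration, the reversed body traces the same parabola, so a hit found at `t` on it corresponds to `-t` on the original, hence the final `map(|(face,time)|(face,-time))`. A small float check of that identity (illustrative only):
// Illustrative check: x(t) = p + v*t + a*t^2/2 traversed backwards equals
// the trajectory of a body with negated velocity evaluated at -t.
fn extrapolate(p: f64, v: f64, a: f64, t: f64) -> f64 {
    p + v * t + a * t * t / 2.0
}

fn main() {
    let (p, v, a) = (3.0, -2.0, 0.5);
    for t in [-1.5, 0.0, 2.0, 4.25] {
        let forward = extrapolate(p, v, a, t);
        let reversed = extrapolate(p, -v, a, -t); // negated body at negated time
        assert!((forward - reversed).abs() < 1e-12);
    }
}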
pub fn predict_collision_face_out(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>,contact_face_id:MinkowskiFace)->Option<(MinkowskiEdge,GigaTime)>{
//no algorithm needed, there is only one state and two cases (Edge,None)
//determine when it passes an edge ("sliding off" case)
let start_time=range.start_bound().map(|&t|(t-relative_body.time).to_ratio());
let mut best_time=range.end_bound().map(|&t|into_giga_time(t,relative_body.time));
let start_time={
let r=(start_time-relative_body.time).to_ratio();
Ratio::new(r.num,r.den)
};
let mut best_time={
let r=(time_limit-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
let mut best_edge=None;
let face_n=self.face_nd(contact_face_id).0;
for &directed_edge_id in self.face_edges(contact_face_id).as_ref(){
@@ -733,17 +783,28 @@ impl MinkowskiMesh<'_>{
//WARNING: truncated precision
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(((n.dot(relative_body.position))*2-d).wrap_4(),n.dot(relative_body.velocity).wrap_4()*2,n.dot(relative_body.acceleration).wrap_4()){
if low(&start_time,&dt)&&upp(&dt,&best_time)&&n.dot(relative_body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=Bound::Included(dt);
best_edge=Some((directed_edge_id,dt));
if start_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(relative_body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_edge=Some(directed_edge_id);
break;
}
}
}
best_edge
best_edge.map(|e|(e.as_undirected(),best_time))
}
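The `zeroes2` call above solves for the times at which the body crosses the edge boundary plane. Assuming the usual extrapolation x(dt) = p + v*dt + a*dt^2/2, doubling the plane equation removes the half, and the three arguments appear to be exactly the constant, linear and quadratic coefficients of the result. A float sketch of that expansion (illustrative only, plain arrays instead of the fixed-point types):
// Illustrative only: coefficients of the doubled boundary-crossing quadratic.
// 2*(n . x(dt)) - d expands to c0 + c1*dt + c2*dt^2 with:
fn boundary_quadratic(n: [f64; 3], p: [f64; 3], v: [f64; 3], a: [f64; 3], d: f64) -> [f64; 3] {
    let dot = |u: [f64; 3], w: [f64; 3]| u[0] * w[0] + u[1] * w[1] + u[2] * w[2];
    [
        2.0 * dot(n, p) - d, // constant term
        2.0 * dot(n, v),     // linear term
        dot(n, a),           // quadratic term (the 1/2 cancels against the doubling)
    ]
}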
pub fn contains_point(&self,point:Planar64Vec3)->bool{
crate::minimum_difference::contains_point(self,point)
fn infinity_in(&self,infinity_body:Body)->Option<(MinkowskiFace,GigaTime)>{
let infinity_fev=self.infinity_fev(-infinity_body.velocity,infinity_body.position);
infinity_fev.crawl(self,&infinity_body,Time::MIN/4,infinity_body.time).hit()
}
pub fn is_point_in_mesh(&self,point:Planar64Vec3)->bool{
let infinity_body=Body::new(point,vec3::Y,vec3::ZERO,Time::ZERO);
//movement must escape the mesh forwards and backwards in time,
//otherwise the point is not inside the mesh
self.infinity_in(infinity_body)
.is_some_and(|_|
self.infinity_in(-infinity_body)
.is_some()
)
}
}
impl MeshQuery for MinkowskiMesh<'_>{
@@ -782,12 +843,6 @@ impl MeshQuery for MinkowskiMesh<'_>{
},
}
}
fn hint_point(&self)->Planar64Vec3{
self.mesh0.transform.vertex.translation-self.mesh1.transform.vertex.translation
}
fn farthest_vert(&self,dir:Planar64Vec3)->MinkowskiVert{
MinkowskiVert::VertVert(self.mesh0.farthest_vert(dir),self.mesh1.farthest_vert(-dir))
}
fn face_edges(&self,face_id:MinkowskiFace)->impl AsRef<[MinkowskiDirectedEdge]>{
match face_id{
MinkowskiFace::VertFace(v0,f1)=>{
@@ -877,10 +932,10 @@ impl MeshQuery for MinkowskiMesh<'_>{
}
fn edge_verts(&self,edge_id:MinkowskiEdge)->impl AsRef<[MinkowskiVert;2]>{
AsRefHelper(match edge_id{
MinkowskiEdge::VertEdge(v0,e1)=>self.mesh1.edge_verts(e1).as_ref().map(|vert_id1|
MinkowskiEdge::VertEdge(v0,e1)=>(*self.mesh1.edge_verts(e1).as_ref()).map(|vert_id1|
MinkowskiVert::VertVert(v0,vert_id1)
),
MinkowskiEdge::EdgeVert(e0,v1)=>self.mesh0.edge_verts(e0).as_ref().map(|vert_id0|
MinkowskiEdge::EdgeVert(e0,v1)=>(*self.mesh0.edge_verts(e0).as_ref()).map(|vert_id0|
MinkowskiVert::VertVert(vert_id0,v1)
),
})
@@ -890,43 +945,38 @@ impl MeshQuery for MinkowskiMesh<'_>{
MinkowskiVert::VertVert(v0,v1)=>{
let mut edges=Vec::new();
//detect shared volume when the other mesh is mirrored along a test edge dir
let v0f_thing=self.mesh0.vert_faces(v0);
let v1f_thing=self.mesh1.vert_faces(v1);
let v0f=v0f_thing.as_ref();
let v1f=v1f_thing.as_ref();
let v0f_n:Vec<_>=v0f.iter().map(|&face_id|self.mesh0.face_nd(face_id).0).collect();
let v1f_n:Vec<_>=v1f.iter().map(|&face_id|self.mesh1.face_nd(face_id).0).collect();
// scratch vector
let mut face_normals=Vec::with_capacity(v0f.len()+v1f.len());
face_normals.clone_from(&v0f_n);
let v0f=self.mesh0.vert_faces(v0);
let v1f=self.mesh1.vert_faces(v1);
let v0f_n:Vec<_>=v0f.as_ref().iter().map(|&face_id|self.mesh0.face_nd(face_id).0).collect();
let v1f_n:Vec<_>=v1f.as_ref().iter().map(|&face_id|self.mesh1.face_nd(face_id).0).collect();
let the_len=v0f.as_ref().len()+v1f.as_ref().len();
for &directed_edge_id in self.mesh0.vert_edges(v0).as_ref(){
let n=self.mesh0.directed_edge_n(directed_edge_id);
let nn=n.dot(n);
// TODO: there's gotta be a better way to do this
// drop faces beyond v0f_n
face_normals.truncate(v0f.len());
// make a set of faces from mesh0's perspective
//make a set of faces
let mut face_normals=Vec::with_capacity(the_len);
//add mesh0 faces as-is
face_normals.clone_from(&v0f_n);
for face_n in &v1f_n{
//add reflected mesh1 faces
//wrap for speed
face_normals.push(*face_n-(n*face_n.dot(n)*2/nn).divide().wrap_3());
}
if is_empty_volume(&face_normals){
if is_empty_volume(face_normals){
edges.push(MinkowskiDirectedEdge::EdgeVert(directed_edge_id,v1));
}
}
face_normals.clone_from(&v1f_n);
for &directed_edge_id in self.mesh1.vert_edges(v1).as_ref(){
let n=self.mesh1.directed_edge_n(directed_edge_id);
let nn=n.dot(n);
// drop faces beyond v1f_n
face_normals.truncate(v1f.len());
// make a set of faces from mesh1's perspective
let mut face_normals=Vec::with_capacity(the_len);
face_normals.clone_from(&v1f_n);
for face_n in &v0f_n{
//wrap for speed
face_normals.push(*face_n-(n*face_n.dot(n)*2/nn).divide().wrap_3());
}
if is_empty_volume(&face_normals){
if is_empty_volume(face_normals){
edges.push(MinkowskiDirectedEdge::VertEdge(v0,directed_edge_id));
}
}
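The expression `face_n-(n*face_n.dot(n)*2/nn)` is the reflection of the face normal across the plane perpendicular to the test edge direction `n`, i.e. the standard Householder form v - 2n(v.n)/(n.n). A quick float check of that identity, independent of the fixed-point types used here (illustrative only):
// Reflect v across the plane with normal n and check the defining property:
// the normal component flips sign, the tangential component is unchanged.
fn reflect(v: [f64; 3], n: [f64; 3]) -> [f64; 3] {
    let dot = |a: [f64; 3], b: [f64; 3]| a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
    let k = 2.0 * dot(v, n) / dot(n, n);
    [v[0] - k * n[0], v[1] - k * n[1], v[2] - k * n[2]]
}

fn main() {
    let dot = |a: [f64; 3], b: [f64; 3]| a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
    let (v, n) = ([1.0, 2.0, 3.0], [0.0, 1.0, 1.0]);
    let r = reflect(v, n);
    assert!((dot(r, n) + dot(v, n)).abs() < 1e-12); // normal component negated
}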
@@ -936,12 +986,11 @@ impl MeshQuery for MinkowskiMesh<'_>{
}
fn vert_faces(&self,_vert_id:MinkowskiVert)->impl AsRef<[MinkowskiFace]>{
unimplemented!();
#[expect(unreachable_code)]
Vec::new()
vec![]
}
}
fn is_empty_volume(normals:&[Vector3<Fixed<3,96>>])->bool{
fn is_empty_volume(normals:Vec<Vector3<Fixed<3,96>>>)->bool{
let len=normals.len();
for i in 0..len-1{
for j in i+1..len{
@@ -967,6 +1016,6 @@ fn is_empty_volume(normals:&[Vector3<Fixed<3,96>>])->bool{
#[test]
fn test_is_empty_volume(){
assert!(!is_empty_volume(&[vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3()]));
assert!(is_empty_volume(&[vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3(),vec3::NEG_X.widen_3()]));
assert!(!is_empty_volume([vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3()].to_vec()));
assert!(is_empty_volume([vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3(),vec3::NEG_X.widen_3()].to_vec()));
}

File diff suppressed because it is too large Load Diff

View File

@@ -39,18 +39,20 @@ impl Contact{
//note that this is horrible with fixed point arithmetic
fn solve1(c0:&Contact)->Option<Ratio<Vector3<Fixed<3,96>>,Fixed<2,64>>>{
const EPSILON:Fixed<2,64>=Fixed::from_bits(Fixed::<2,64>::ONE.to_bits().shr(10));
let det=c0.normal.dot(c0.velocity);
if det.abs()==Fixed::ZERO{
if det.abs()<EPSILON{
return None;
}
let d0=c0.normal.dot(c0.position);
Some(c0.normal*d0/det)
}
fn solve2(c0:&Contact,c1:&Contact)->Option<Ratio<Vector3<Fixed<5,160>>,Fixed<4,128>>>{
const EPSILON:Fixed<4,128>=Fixed::from_bits(Fixed::<4,128>::ONE.to_bits().shr(10));
let u0_u1=c0.velocity.cross(c1.velocity);
let n0_n1=c0.normal.cross(c1.normal);
let det=u0_u1.dot(n0_n1);
if det.abs()==Fixed::ZERO{
if det.abs()<EPSILON{
return None;
}
let d0=c0.normal.dot(c0.position);
@@ -58,9 +60,10 @@ fn solve2(c0:&Contact,c1:&Contact)->Option<Ratio<Vector3<Fixed<5,160>>,Fixed<4,1
Some((c1.normal.cross(u0_u1)*d0+u0_u1.cross(c0.normal)*d1)/det)
}
fn solve3(c0:&Contact,c1:&Contact,c2:&Contact)->Option<Ratio<Vector3<Fixed<4,128>>,Fixed<3,96>>>{
const EPSILON:Fixed<3,96>=Fixed::from_bits(Fixed::<3,96>::ONE.to_bits().shr(10));
let n0_n1=c0.normal.cross(c1.normal);
let det=c2.normal.dot(n0_n1);
if det.abs()==Fixed::ZERO{
if det.abs()<EPSILON{
return None;
}
let d0=c0.normal.dot(c0.position);
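For reference, `EPSILON` here is `ONE >> 10`, which (assuming `Fixed<N,64>` carries 64 fractional bits as the name suggests) is 2^-10 ≈ 0.00098 in the fixed-point scale; rejecting |det| below that bound keeps the subsequent division by `det` from blowing past the representable range, at the cost of treating nearly-parallel contact sets as unsolvable. A float picture of the threshold (illustrative only):
// Illustrative: the epsilon used above, expressed in ordinary floats.
fn main() {
    let epsilon = 1.0_f64 / (1u64 << 10) as f64; // ONE >> 10  ==  2^-10
    println!("epsilon = {epsilon}");             // 0.0009765625
    // determinants smaller than this are rejected instead of risking overflow in d0/det
}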
@@ -146,7 +149,7 @@ fn is_space_enclosed_4(
}
const fn get_push_ray_0(point:Planar64Vec3)->Ray{
Ray{origin:point,direction:vec3::zero()}
Ray{origin:point,direction:vec3::ZERO}
}
fn get_push_ray_1(point:Planar64Vec3,c0:&Contact)->Option<Ray>{
//wrap for speed
@@ -189,7 +192,7 @@ fn get_push_ray_3(point:Planar64Vec3,c0:&Contact,c1:&Contact,c2:&Contact)->Optio
const fn get_best_push_ray_and_conts_0<'a>(point:Planar64Vec3)->(Ray,Conts<'a>){
(get_push_ray_0(point),Conts::new_const())
}
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts<'_>)>{
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts)>{
get_push_ray_1(point,c0)
.map(|ray|(ray,Conts::from_iter([c0])))
}
@@ -318,13 +321,13 @@ mod tests{
fn test_push_solve(){
let contacts=vec![
Contact{
position:vec3::zero(),
position:vec3::ZERO,
velocity:vec3::Y,
normal:vec3::Y,
}
];
assert_eq!(
vec3::zero(),
vec3::ZERO,
push_solve(&contacts,vec3::NEG_Y)
);
}

View File

@@ -10,6 +10,3 @@ strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../physics", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
strafesnet_snf = { path = "../../lib/snf", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -31,7 +31,7 @@ pub enum SessionInputInstruction{
Mouse(glam::IVec2),
SetControl(strafesnet_common::physics::SetControlInstruction),
Mode(ImplicitModeInstruction),
Misc(MiscInstruction),
Misc(strafesnet_common::physics::MiscInstruction),
}
/// Implicit mode instructions are fed separately to session.
/// Session generates the explicit mode instructions interlaced with a SetSensitivity instruction
@@ -61,7 +61,6 @@ pub struct FrameState{
pub body:physics::Body,
pub camera:physics::PhysicsCamera,
pub time:PhysicsTime,
pub hit:Option<Hit>,
}
pub struct Simulation{
@@ -78,12 +77,11 @@ impl Simulation{
physics,
}
}
pub fn get_frame_state(&self,time:SessionTime,debug_model:Option<Hit>)->FrameState{
pub fn get_frame_state(&self,time:SessionTime)->FrameState{
FrameState{
body:self.physics.camera_body(),
camera:self.physics.camera(),
time:self.timer.time(time),
hit: debug_model,
}
}
}
@@ -151,25 +149,18 @@ enum ViewState{
Replay(BotId),
}
#[derive(Clone)]
pub struct Hit{
pub convex_mesh_id:physics::ConvexMeshId<physics::PhysicsModelId>,
pub closest_fev:Option<strafesnet_physics::model::FEV<strafesnet_physics::model::TransformedMesh<'static>>>
}
pub struct Session{
directories:Directories,
user_settings:UserSettings,
mouse_interpolator:MouseInterpolator,
mouse_interpolator:crate::mouse_interpolator::MouseInterpolator,
view_state:ViewState,
//gui:GuiState
geometry_shared:PhysicsData,
geometry_shared:physics::PhysicsData,
simulation:Simulation,
// below fields not included in lite session
recording:Recording,
//players:HashMap<PlayerId,Simulation>,
replays:HashMap<BotId,Replay>,
last_ray_hit:Option<Hit>,
}
impl Session{
pub fn new(
@@ -181,12 +172,11 @@ impl Session{
user_settings,
directories,
mouse_interpolator:MouseInterpolator::new(),
geometry_shared:PhysicsData::empty(),
geometry_shared:Default::default(),
simulation,
view_state:ViewState::Play,
recording:Default::default(),
replays:HashMap::new(),
last_ray_hit:None,
}
}
fn clear_recording(&mut self){
@@ -194,34 +184,16 @@ impl Session{
}
fn change_map(&mut self,map:&strafesnet_common::map::CompleteMap){
self.simulation.physics.clear();
self.geometry_shared=PhysicsData::new(map);
self.geometry_shared.generate_models(map);
}
pub fn get_frame_state(&self,time:SessionTime)->Option<FrameState>{
match &self.view_state{
ViewState::Play=>Some(self.simulation.get_frame_state(time,self.last_ray_hit.clone())),
ViewState::Play=>Some(self.simulation.get_frame_state(time)),
ViewState::Replay(bot_id)=>self.replays.get(bot_id).map(|replay|
replay.simulation.get_frame_state(time,None)
replay.simulation.get_frame_state(time)
),
}
}
pub fn debug_raycast_print_model_id_if_changed(&mut self,time:SessionTime){
if let Some(frame_state)=self.get_frame_state(time){
let ray=strafesnet_common::ray::Ray{
origin:frame_state.body.extrapolated_position(self.simulation.timer.time(time)),
direction:-frame_state.camera.rotation().z_axis,
};
match self.geometry_shared.trace_ray(ray){
Some(convex_mesh_id)=>{
let closest_fev=self.geometry_shared.closest_fev_not_inside(convex_mesh_id,self.simulation.physics.body().position);
self.last_ray_hit=Some(Hit{
convex_mesh_id,
closest_fev,
});
},
None=>self.last_ray_hit=None,
}
}
}
pub fn user_settings(&self)->&UserSettings{
&self.user_settings
}

View File

@@ -8,6 +8,3 @@ configparser = "3.0.2"
directories = "6.0.0"
glam = "0.30.0"
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -1 +0,0 @@
/test_files

View File

@@ -4,12 +4,6 @@ version = "0.1.0"
edition = "2024"
[dependencies]
glam = "0.30.0"
strafesnet_common = { path = "../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../engine/physics", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet" }
# this is just for the primitive constructor
strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -1,28 +0,0 @@
#[expect(dead_code)]
#[derive(Debug)]
pub enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}

View File

@@ -1,15 +1,7 @@
mod error;
mod util;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_scenes;
use std::io::Cursor;
use std::path::Path;
use std::time::Instant;
use error::ReplayError;
use util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
fn main(){
@@ -21,6 +13,40 @@ fn main(){
}
}
#[allow(unused)]
#[derive(Debug)]
enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}
fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}
fn run_replay()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
@@ -31,8 +57,9 @@ fn run_replay()->Result<(),ReplayError>{
let bot=strafesnet_snf::read_bot(data)?.read_all()?;
// create recording
let mut physics_data=PhysicsData::default();
println!("generating models..");
let physics_data=PhysicsData::new(&map);
physics_data.generate_models(&map);
println!("simulating...");
let mut physics=PhysicsState::default();
for ins in bot.instructions{
@@ -140,8 +167,9 @@ fn test_determinism()->Result<(),ReplayError>{
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
let mut physics_data=PhysicsData::default();
println!("generating models..");
let physics_data=PhysicsData::new(&map);
physics_data.generate_models(&map);
let (send,recv)=std::sync::mpsc::channel();

View File

@@ -1,117 +0,0 @@
use strafesnet_physics::physics::{InternalInstruction,PhysicsData,PhysicsState,PhysicsContext};
use strafesnet_common::gameplay_modes::NormalizedModes;
use strafesnet_common::gameplay_attributes::{CollisionAttributes,CollisionAttributesId};
use strafesnet_common::integer::{vec3,mat3,Planar64Affine3,Time};
use strafesnet_common::model::{Mesh,Model,MeshId,ModelId,RenderConfigId};
use strafesnet_common::map::CompleteMap;
use strafesnet_rbx_loader::primitives::{unit_cube,CubeFaceDescription};
struct TestSceneBuilder{
meshes:Vec<Mesh>,
models:Vec<Model>,
}
impl TestSceneBuilder{
fn new()->Self{
Self{
meshes:Vec::new(),
models:Vec::new(),
}
}
fn push_mesh(&mut self,mesh:Mesh)->MeshId{
let mesh_id=self.meshes.len();
self.meshes.push(mesh);
MeshId::new(mesh_id as u32)
}
fn push_mesh_instance(&mut self,mesh:MeshId,transform:Planar64Affine3)->ModelId{
let model=Model{
mesh,
attributes:CollisionAttributesId::new(0),
color:glam::Vec4::ONE,
transform,
};
let model_id=self.models.len();
self.models.push(model);
ModelId::new(model_id as u32)
}
fn build(self)->PhysicsData{
let modes=NormalizedModes::new(Vec::new());
let attributes=vec![CollisionAttributes::contact_default()];
let meshes=self.meshes;
let models=self.models;
let textures=Vec::new();
let render_configs=Vec::new();
PhysicsData::new(&CompleteMap{
modes,
attributes,
meshes,
models,
textures,
render_configs,
})
}
}
fn test_scene()->PhysicsData{
let mut builder=TestSceneBuilder::new();
let cube_face_description=CubeFaceDescription::new(Default::default(),RenderConfigId::new(0));
let mesh=builder.push_mesh(unit_cube(cube_face_description));
// place two 5x5x5 cubes.
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(0,0,0)
));
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(5,-5,0)
));
builder.build()
}
#[test]
fn simultaneous_collision(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,1,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::zero();
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(2))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// the order that they hit does matter, but we aren't currently worrying about that.
// See multi-collision branch
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5,0,0));
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(1));
}
#[test]
fn bug_3(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,2,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::zero();
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(3))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// touch side of part at 0,0,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
// touch top of part at 5,-5,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(2));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5+2,0,0)>>1);
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(2));
}

View File

@@ -1,76 +0,0 @@
use crate::error::ReplayError;
use crate::util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
#[test]
#[ignore]
fn physics_bug_2()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("test_files/bhop_monster_jam.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
// body pos = Vector { array: [Fixed { bits: 554895163352 }, Fixed { bits: 1485633089990 }, Fixed { bits: 1279601007173 }] }
// after the fix it's still happening, possibly for a different reason, new position to evince:
// body pos = Vector { array: [Fixed { bits: 555690659654 }, Fixed { bits: 1490485868773 }, Fixed { bits: 1277783839382 }] }
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
vec3::raw_xyz(555690659654,1490485868773,1277783839382),
vec3::int(0,0,0),
vec3::int(0,-100,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
// hit=Some(ModelId(2262))
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}
#[test]
#[ignore]
fn physics_bug_3()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692152916.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
// bhop_toc corner position after wall hits
// vec3::raw_xyz(-1401734815424,3315081280280,-2466057177493),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// alternate room center position
// vec3::raw_xyz(-1129043783837,3324870327882,-2014012350212),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// corner setup before wall hits
vec3::raw_xyz(-1392580080675,3325402529458,-2444727738679),
vec3::raw_xyz(-30259028820,-22950929553,-71141663007),
vec3::raw_xyz(0,-429496729600,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}

View File

@@ -1,7 +0,0 @@
use std::io::Cursor;
use std::path::Path;
pub fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_bsp_loader"
version = "0.3.1"
version = "0.3.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -11,12 +11,9 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
glam = "0.30.0"
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
vbsp = "0.9.1"
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
vbsp = "0.8.0"
vbsp-entities-css = "0.6.0"
vmdl = "0.2.0"
vpk = "0.3.0"
[lints]
workspace = true

View File

@@ -1,5 +1,5 @@
use strafesnet_common::integer::Planar64;
use strafesnet_common::{model,integer};
use strafesnet_common::integer::{self,Planar64,Planar64Vec3};
use strafesnet_common::model::{self,VertexId};
use strafesnet_common::integer::{vec3::Vector3,Fixed,Ratio};
use crate::{valve_transform_normal,valve_transform_dist};
@@ -7,7 +7,7 @@ use crate::{valve_transform_normal,valve_transform_dist};
#[derive(Hash,Eq,PartialEq)]
struct Face{
normal:integer::Planar64Vec3,
dot:Planar64,
dot:integer::Planar64,
}
#[derive(Debug)]
@@ -138,7 +138,15 @@ fn planes_to_faces(face_list:std::collections::HashSet<Face>)->Result<Faces,Plan
loop{
// push point onto vertices
// problem: this may push a vertex that does not fit in the fixed point range and is thus meaningless
face.push(intersection.divide().narrow_1().unwrap());
//
// physics bug 2 originates from vertices being imprecise?
//
// Mask off the most precise 16 bits so that
// when face normals are calculated from
// the remaining 16 fractional bits
// they never exceed 32 bits of precision.
const MASK:Planar64=Planar64::raw(!((1<<16)-1));
face.push(intersection.divide().narrow_1().unwrap().map(|c|c&MASK));
// we looped back around to face1, we're done!
if core::ptr::eq(face1,face2){
@@ -187,7 +195,7 @@ fn planes_to_faces(face_list:std::collections::HashSet<Face>)->Result<Faces,Plan
}
}
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum BrushToMeshError{
SliceBrushSides,
@@ -204,24 +212,69 @@ impl std::fmt::Display for BrushToMeshError{
}
impl core::error::Error for BrushToMeshError{}
fn subdivide_max_area(tris:&mut Vec<Vec<VertexId>>,cw_verts:&[(VertexId,Planar64Vec3)],i0:usize,i2:usize,id0:VertexId,id2:VertexId,v0:Planar64Vec3,v2:Planar64Vec3){
if i0+1==i2{
return;
}
let mut best_i1=i0+1;
if i0+2<i2{
let mut best_area={
let (_,v1)=cw_verts[best_i1.rem_euclid(cw_verts.len())];
(v2-v0).cross(v1-v0).length_squared()
};
for i1 in i0+2..=i2-1{
let (_,v1)=cw_verts[i1.rem_euclid(cw_verts.len())];
let area=(v2-v0).cross(v1-v0).length_squared();
if best_area<area{
best_i1=i1;
best_area=area;
}
}
}
let i1=best_i1;
let (id1,v1)=cw_verts[i1.rem_euclid(cw_verts.len())];
// draw max area first
tris.push(vec![id0,id1,id2]);
subdivide_max_area(tris,cw_verts,i0,i1,id0,id1,v0,v1);
subdivide_max_area(tris,cw_verts,i1,i2,id1,id2,v1,v2);
}
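The recursion above triangulates a convex polygon arc by always emitting the largest-area triangle available between the arc endpoints, then subdividing the two remaining arcs. A compact float version of the same idea, with assumed helper names and plain indices rather than the loader's `VertexId`/`Planar64Vec3` types (illustrative only):
// Illustrative max-area triangulation of a convex polygon given as a point list.
// Splits the arc (i0..=i2) at the interior vertex forming the largest triangle
// with the endpoints, emits that triangle, then recurses on the two sub-arcs.
fn area2(a: [f64; 2], b: [f64; 2], c: [f64; 2]) -> f64 {
    ((b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0])).abs()
}

fn subdivide(tris: &mut Vec<[usize; 3]>, pts: &[[f64; 2]], i0: usize, i2: usize) {
    if i0 + 1 >= i2 {
        return; // arc has no interior vertex left
    }
    // pick the interior vertex maximizing the triangle area with the arc endpoints
    let i1 = (i0 + 1..i2)
        .max_by(|&a, &b| {
            area2(pts[i0], pts[a], pts[i2])
                .partial_cmp(&area2(pts[i0], pts[b], pts[i2]))
                .unwrap()
        })
        .unwrap();
    tris.push([i0, i1, i2]); // biggest triangle first
    subdivide(tris, pts, i0, i1);
    subdivide(tris, pts, i1, i2);
}
The caller below additionally scans all vertex triples up front (the O(n^3) pass) so the globally largest triangle is emitted before its three arcs are subdivided.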
pub fn faces_to_mesh(faces:Vec<Vec<integer::Planar64Vec3>>)->model::Mesh{
// generate the mesh
let mut mb=model::MeshBuilder::new();
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// normals are ignored by physics
let normal=mb.acquire_normal_id(integer::vec3::zero());
let normal=mb.acquire_normal_id(integer::vec3::ZERO);
let polygon_list=faces.into_iter().map(|face|{
face.into_iter().map(|pos|{
let pos=mb.acquire_pos_id(pos);
mb.acquire_vertex_id(model::IndexedVertex{
let polygon_list=faces.into_iter().flat_map(|face|{
let cw_verts=face.into_iter().map(|position|{
let pos=mb.acquire_pos_id(position);
(mb.acquire_vertex_id(model::IndexedVertex{
pos,
tex,
normal,
color,
})
}).collect()
}),position)
}).collect::<Vec<_>>();
// scan and select maximum area triangle O(n^3)
let len=cw_verts.len();
let cw_verts=cw_verts.as_slice();
let ((i0,i1,i2),(v0,v1,v2))=cw_verts[..len-2].iter().enumerate().flat_map(|(i0,&(_,v0))|
cw_verts[i0+1..len-1].iter().enumerate().flat_map(move|(i1,&(_,v1))|
cw_verts[i0+i1+2..].iter().enumerate().map(move|(i2,&(_,v2))|((i0,i0+i1+1,i0+i1+i2+2),(v0,v1,v2)))
)
).max_by_key(|&(_,(v0,v1,v2))|(v2-v0).cross(v1-v0).length_squared()).unwrap();
// recursively select further maximum-area triangles, n * O(n)
let mut tris=Vec::with_capacity(len-2);
// da big one
let (id0,id1,id2)=(cw_verts[i0].0,cw_verts[i1].0,cw_verts[i2].0);
tris.push(vec![id0,id1,id2]);
subdivide_max_area(&mut tris,cw_verts,i0,i1,id0,id1,v0,v1);
subdivide_max_area(&mut tris,cw_verts,i1,i2,id1,id2,v1,v2);
subdivide_max_area(&mut tris,cw_verts,i2,i0+len,id2,id0,v2,v0);
tris
}).collect();
let polygon_groups=vec![model::PolygonGroup::PolygonList(model::PolygonList::new(polygon_list))];

View File

@@ -105,7 +105,7 @@ pub fn convert<'a>(
water:Some(attr::IntersectingWater{
viscosity:integer::Planar64::ONE,
density:integer::Planar64::ONE,
velocity:integer::vec3::zero(),
velocity:integer::vec3::ZERO,
}),
},
general:attr::GeneralAttributes::default(),
@@ -295,7 +295,7 @@ pub fn convert<'a>(
attributes,
transform:integer::Planar64Affine3::new(
integer::mat3::identity(),
integer::vec3::zero(),
integer::vec3::ZERO,
),
color:glam::Vec4::ONE,
});
@@ -347,9 +347,9 @@ pub struct PartialMap1{
modes:NormalizedModes,
}
impl PartialMap1{
pub fn add_prop_meshes(
pub fn add_prop_meshes<'a>(
self,
prop_meshes:Meshes<model::Mesh>,
prop_meshes:Meshes,
)->PartialMap2{
PartialMap2{
attributes:self.attributes,

View File

@@ -5,6 +5,7 @@ use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
use crate::{Bsp,Vpk};
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -21,17 +22,17 @@ impl From<std::io::Error> for TextureError{
}
}
pub struct TextureLoader;
impl TextureLoader{
pub struct TextureLoader<'a>(std::marker::PhantomData<&'a ()>);
impl TextureLoader<'_>{
pub fn new()->Self{
Self
Self(std::marker::PhantomData)
}
}
impl Loader for TextureLoader{
impl<'a> Loader for TextureLoader<'a>{
type Error=TextureError;
type Index<'a>=Cow<'a,str>;
type Index=Cow<'a,str>;
type Resource=Texture;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
let file_name=format!("textures/{}.dds",index);
let mut file=std::fs::File::open(file_name)?;
let mut data=Vec::new();
@@ -40,6 +41,7 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -98,24 +100,30 @@ impl<'bsp,'vpk> BspFinder<'bsp,'vpk>{
}
}
pub struct ModelLoader<'bsp,'vpk>{
pub struct ModelLoader<'bsp,'vpk,'a>{
finder:BspFinder<'bsp,'vpk>,
life:core::marker::PhantomData<&'a ()>,
}
impl ModelLoader<'_,'_>{
impl ModelLoader<'_,'_,'_>{
#[inline]
pub const fn new<'bsp,'vpk>(
pub const fn new<'bsp,'vpk,'a>(
finder:BspFinder<'bsp,'vpk>,
)->ModelLoader<'bsp,'vpk>{
)->ModelLoader<'bsp,'vpk,'a>{
ModelLoader{
finder,
life:core::marker::PhantomData,
}
}
}
impl Loader for ModelLoader<'_,'_>{
impl<'bsp,'vpk,'a> Loader for ModelLoader<'bsp,'vpk,'a>
where
'bsp:'a,
'vpk:'a,
{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Index=&'a str;
type Resource=vmdl::Model;
fn load<'a>(&'a mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
let mdl_path_lower=index.to_lowercase();
//.mdl, .vvd, .dx90.vtx
let path=std::path::PathBuf::from(mdl_path_lower.as_str());
@@ -135,27 +143,31 @@ impl Loader for ModelLoader<'_,'_>{
}
}
pub struct MeshLoader<'bsp,'vpk,'load,'str>{
pub struct MeshLoader<'bsp,'vpk,'load,'a>{
finder:BspFinder<'bsp,'vpk>,
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'str,str>>,
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
}
impl MeshLoader<'_,'_,'_,'_>{
#[inline]
pub const fn new<'bsp,'vpk,'load,'str>(
pub const fn new<'bsp,'vpk,'load,'a>(
finder:BspFinder<'bsp,'vpk>,
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'str,str>>,
)->MeshLoader<'bsp,'vpk,'load,'str>{
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
)->MeshLoader<'bsp,'vpk,'load,'a>{
MeshLoader{
finder,
deferred_loader
}
}
}
impl Loader for MeshLoader<'_,'_,'_,'_>{
impl<'bsp,'vpk,'load,'a> Loader for MeshLoader<'bsp,'vpk,'load,'a>
where
'bsp:'a,
'vpk:'a,
{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Index=&'a str;
type Resource=Mesh;
fn load<'a>(&'a mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
let model=ModelLoader::new(self.finder).load(index)?;
let mesh=crate::mesh::convert_mesh(model,&mut self.deferred_loader);
Ok(mesh)

View File

@@ -61,7 +61,7 @@ pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredL
_=>None,
}
})
}).filter_map(|[v1,v2,v3]|{
}).flat_map(|[v1,v2,v3]|{
// this should probably be a fatal error :D
let v1=model_vertices.get(v1)?;
let v2=model_vertices.get(v2)?;

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_common"
version = "0.7.0"
version = "0.6.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -17,6 +17,3 @@ linear_ops = { version = "0.1.1", path = "../linear_ops", registry = "strafesnet
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet" }
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -2,9 +2,7 @@ use crate::integer::{vec3,Planar64Vec3};
#[derive(Clone)]
pub struct Aabb{
// min is inclusive
min:Planar64Vec3,
// max is not inclusive
max:Planar64Vec3,
}
@@ -45,7 +43,7 @@ impl Aabb{
}
#[inline]
pub fn contains(&self,point:Planar64Vec3)->bool{
let bvec=self.min.le(point)&point.lt(self.max);
let bvec=self.min.lt(point)&point.lt(self.max);
bvec.all()
}
#[inline]
@@ -61,11 +59,11 @@ impl Aabb{
pub fn center(&self)->Planar64Vec3{
self.min.map_zip(self.max,|(min,max)|min.midpoint(max))
}
#[inline]
pub fn area_weight(&self)->fixed_wide::fixed::Fixed<2,64>{
let d=self.max-self.min;
d.x*d.y+d.y*d.z+d.z*d.x
}
//probably use floats for area & volume because we don't care about precision
// pub fn area_weight(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y+d.y*d.z+d.z*d.x
// }
// pub fn volume(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y*d.z
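For reference, d.x*d.y + d.y*d.z + d.z*d.x is half the surface area of the box, the usual surface-area-heuristic weight for choosing BVH splits. A float equivalent mirroring the commented-out version above (illustrative only):
// Illustrative float equivalents for an AABB with extents d.
fn area_weight(d: [f32; 3]) -> f32 {
    d[0] * d[1] + d[1] * d[2] + d[2] * d[0] // half the surface area
}

fn volume(d: [f32; 3]) -> f32 {
    d[0] * d[1] * d[2]
}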

View File

@@ -245,19 +245,18 @@ pub fn generate_bvh<T>(boxen:Vec<(T,Aabb)>)->BvhNode<T>{
fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
let n=boxen.len();
const MAX_TERMINAL_BRANCH_LEAF_NODES:usize=20;
if force||n<MAX_TERMINAL_BRANCH_LEAF_NODES{
let mut aabb_outer=Aabb::default();
let nodes=boxen.into_iter().map(|(data,aabb)|{
aabb_outer.join(&aabb);
if force||n<20{
let mut aabb=Aabb::default();
let nodes=boxen.into_iter().map(|b|{
aabb.join(&b.1);
BvhNode{
content:RecursiveContent::Leaf(data),
aabb,
content:RecursiveContent::Leaf(b.0),
aabb:b.1,
}
}).collect();
BvhNode{
content:RecursiveContent::Branch(nodes),
aabb:aabb_outer,
aabb,
}
}else{
let mut sort_x=Vec::with_capacity(n);
@@ -273,9 +272,9 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
sort_y.sort_by_key(|&(_,c)|c);
sort_z.sort_by_key(|&(_,c)|c);
let h=n/2;
let (_,median_x)=sort_x[h];
let (_,median_y)=sort_y[h];
let (_,median_z)=sort_z[h];
let median_x=sort_x[h].1;
let median_y=sort_y[h].1;
let median_z=sort_z[h].1;
//locate a run of values equal to the median
//partition point gives the first index for which the predicate evaluates to false
let first_index_eq_median_x=sort_x.partition_point(|&(_,x)|x<median_x);
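`partition_point` on the sorted centroid lists gives the first index whose key is not less than the median; pairing it with a second call using `<=` (an assumption here, since that part of the hunk is cut off) yields the half-open run of elements equal to the median. A standalone illustration:
// Illustrative: locate the run of values equal to the median in a sorted list.
fn main() {
    let sorted = [1, 3, 3, 3, 7, 9];
    let median = sorted[sorted.len() / 2]; // == 3
    let first_eq = sorted.partition_point(|&x| x < median);  // first index with x >= median
    let first_gt = sorted.partition_point(|&x| x <= median); // first index with x > median
    assert_eq!((first_eq, first_gt), (1, 4)); // the run of medians is sorted[1..4]
}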
@@ -314,10 +313,10 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
};
list_list[list_id].push((data,aabb));
}
let mut aabb=Aabb::default();
if list_list.len()==1{
generate_bvh_node(list_list.remove(0),true)
}else{
let mut aabb=Aabb::default();
BvhNode{
content:RecursiveContent::Branch(
list_list.into_iter().map(|b|{

View File

@@ -140,15 +140,6 @@ impl ModeId{
pub const MAIN:Self=Self(0);
pub const BONUS:Self=Self(1);
}
impl core::fmt::Display for ModeId{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->core::fmt::Result{
match self{
&Self::MAIN=>write!(f,"Main"),
&Self::BONUS=>write!(f,"Bonus"),
&Self(mode_id)=>write!(f,"Bonus{mode_id}"),
}
}
}
#[derive(Clone)]
pub struct Mode{
style:gameplay_style::StyleModifiers,
@@ -449,11 +440,11 @@ impl ModesBuilder{
}
NormalizedModes::new(modes.into_iter().map(|mode_builder|NormalizedMode(mode_builder.mode)).collect())
}
pub fn insert_mode(&mut self,mode_id:ModeId,mode:Mode)->Result<(),ExistingEntryError>{
error_if_exists(self.modes.insert(mode_id,mode))
pub fn insert_mode(&mut self,mode_id:ModeId,mode:Mode){
assert!(self.modes.insert(mode_id,mode).is_none(),"Cannot replace existing mode");
}
pub fn insert_stage(&mut self,mode_id:ModeId,stage_id:StageId,stage:Stage)->Result<(),ExistingEntryError>{
error_if_exists(self.stages.entry(mode_id).or_insert(HashMap::new()).insert(stage_id,stage))
pub fn insert_stage(&mut self,mode_id:ModeId,stage_id:StageId,stage:Stage){
assert!(self.stages.entry(mode_id).or_insert(HashMap::new()).insert(stage_id,stage).is_none(),"Cannot replace existing stage");
}
pub fn push_mode_update(&mut self,mode_id:ModeId,mode_update:ModeUpdate){
self.mode_updates.push((mode_id,mode_update));
@@ -462,12 +453,3 @@ impl ModesBuilder{
// self.stage_updates.push((mode_id,stage_id,stage_update));
// }
}
#[derive(Debug)]
pub struct ExistingEntryError;
fn error_if_exists<T>(value:Option<T>)->Result<(),ExistingEntryError>{
match value{
Some(_)=>Err(ExistingEntryError),
None=>Ok(())
}
}

View File

@@ -34,7 +34,7 @@ pub struct StyleModifiers{
//unused
pub mass:Planar64,
}
impl Default for StyleModifiers{
impl std::default::Default for StyleModifiers{
fn default()->Self{
Self::roblox_bhop()
}
@@ -319,7 +319,7 @@ impl WalkSettings{
self.accelerate.accel.min((-gravity.y*friction).clamp_1())
}
pub fn get_walk_target_velocity(&self,control_dir:Planar64Vec3,normal:Planar64Vec3)->Planar64Vec3{
if control_dir==crate::integer::vec3::zero(){
if control_dir==crate::integer::vec3::ZERO{
return control_dir;
}
let nn=normal.length_squared();
@@ -329,13 +329,13 @@ impl WalkSettings{
let dd=d*d;
if dd<nnmm{
let cr=normal.cross(control_dir);
if cr==crate::integer::vec3::zero(){
crate::integer::vec3::zero()
if cr==crate::integer::vec3::ZERO_2{
crate::integer::vec3::ZERO
}else{
(cr.cross(normal)*self.accelerate.topspeed/((nn*(nnmm-dd)).sqrt())).divide().clamp_1()
}
}else{
crate::integer::vec3::zero()
crate::integer::vec3::ZERO
}
}
pub fn is_slope_walkable(&self,normal:Planar64Vec3,up:Planar64Vec3)->bool{
@@ -360,7 +360,7 @@ impl LadderSettings{
self.accelerate.accel
}
pub fn get_ladder_target_velocity(&self,mut control_dir:Planar64Vec3,normal:Planar64Vec3)->Planar64Vec3{
if control_dir==crate::integer::vec3::zero(){
if control_dir==crate::integer::vec3::ZERO{
return control_dir;
}
let nn=normal.length_squared();
@@ -382,13 +382,13 @@ impl LadderSettings{
//- fix the underlying issue
if dd<nnmm{
let cr=normal.cross(control_dir);
if cr==crate::integer::vec3::zero(){
crate::integer::vec3::zero()
if cr==crate::integer::vec3::ZERO_2{
crate::integer::vec3::ZERO
}else{
(cr.cross(normal)*self.accelerate.topspeed/((nn*(nnmm-dd)).sqrt())).divide().clamp_1()
}
}else{
crate::integer::vec3::zero()
crate::integer::vec3::ZERO
}
}
}

View File

@@ -34,41 +34,12 @@ pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsu
self.process_instruction(instruction);
}
}
#[inline]
fn into_iter(self,time_limit:T)->InstructionIter<I,T,Self>
where
Self:Sized
{
InstructionIter{
time_limit,
feedback:self,
_phantom:core::marker::PhantomData,
}
}
}
impl<I,T,F> InstructionFeedback<I,T> for F
impl<I,T,X> InstructionFeedback<I,T> for X
where
T:Copy,
F:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
{}
pub struct InstructionIter<I,T:Copy,F:InstructionFeedback<I,T>>{
time_limit:T,
feedback:F,
_phantom:core::marker::PhantomData<I>,
}
impl<I,T,F> Iterator for InstructionIter<I,T,F>
where
I:Clone,
T:Clone+Copy,
F:InstructionFeedback<I,T>,
{
type Item=TimedInstruction<I,T>;
fn next(&mut self)->Option<Self::Item>{
let instruction=self.feedback.next_instruction(self.time_limit)?;
self.feedback.process_instruction(instruction.clone());
Some(instruction)
}
}
//PROPER PRIVATE FIELDS!!!
pub struct InstructionCollector<I,T>{

View File

@@ -1,5 +1,5 @@
pub use fixed_wide::fixed::*;
pub use ratio_ops::ratio::{Ratio,Divide,Parity};
pub use ratio_ops::ratio::{Ratio,Divide};
//integer units
@@ -86,7 +86,7 @@ impl<T> std::fmt::Display for Time<T>{
write!(f,"{}s+{:09}ns",self.0/Self::ONE_SECOND.0,self.0%Self::ONE_SECOND.0)
}
}
impl<T> Default for Time<T>{
impl<T> std::default::Default for Time<T>{
fn default()->Self{
Self::raw(0)
}
@@ -126,27 +126,26 @@ impl_time_additive_assign_operator!(core::ops::AddAssign,add_assign);
impl_time_additive_assign_operator!(core::ops::SubAssign,sub_assign);
impl_time_additive_assign_operator!(core::ops::RemAssign,rem_assign);
impl<T> std::ops::Mul for Time<T>{
type Output=Ratio<Fixed<2,64>,Fixed<2,64>>;
type Output=Ratio<fixed_wide::fixed::Fixed<2,64>,fixed_wide::fixed::Fixed<2,64>>;
#[inline]
fn mul(self,rhs:Self)->Self::Output{
Ratio::new(Fixed::raw(self.0)*Fixed::raw(rhs.0),Fixed::raw_digit(1_000_000_000i64.pow(2)))
}
}
macro_rules! impl_time_i64_rhs_operator {
($op:ident,$method:ident)=>{
impl<T> core::ops::$op<i64> for Time<T>{
type Output=Self;
#[inline]
fn $method(self,rhs:i64)->Self::Output{
Self::raw(self.0.$method(rhs))
}
}
impl<T> std::ops::Div<i64> for Time<T>{
type Output=Self;
#[inline]
fn div(self,rhs:i64)->Self::Output{
Self::raw(self.0/rhs)
}
}
impl<T> std::ops::Mul<i64> for Time<T>{
type Output=Self;
#[inline]
fn mul(self,rhs:i64)->Self::Output{
Self::raw(self.0*rhs)
}
}
impl_time_i64_rhs_operator!(Div,div);
impl_time_i64_rhs_operator!(Mul,mul);
impl_time_i64_rhs_operator!(Shr,shr);
impl_time_i64_rhs_operator!(Shl,shl);
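The macro consolidates the scalar right-hand-side operators on `Time<T>`. For reference, expanding one invocation by hand (this is the expansion implied by the macro body above, not separate code in the tree) gives:
// Hand expansion of impl_time_i64_rhs_operator!(Shr,shr):
impl<T> core::ops::Shr<i64> for Time<T>{
	type Output=Self;
	#[inline]
	fn shr(self,rhs:i64)->Self::Output{
		Self::raw(self.0.shr(rhs))
	}
}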
impl<T> core::ops::Mul<Time<T>> for Planar64{
type Output=Ratio<Fixed<2,64>,Planar64>;
fn mul(self,rhs:Time<T>)->Self::Output{
@@ -156,7 +155,7 @@ impl<T> core::ops::Mul<Time<T>> for Planar64{
#[cfg(test)]
mod test_time{
use super::*;
type Time=AbsoluteTime;
type Time=super::AbsoluteTime;
#[test]
fn time_from_planar64(){
let a:Time=Planar64::from(1).into();
@@ -402,10 +401,6 @@ impl Angle32{
pub const NEG_FRAC_PI_2:Self=Self(-1<<30);
pub const PI:Self=Self(-1<<31);
#[inline]
pub const fn raw(num:i32)->Self{
Self(num)
}
#[inline]
pub const fn wrap_from_i64(theta:i64)->Self{
//take lower bits
//note: this was checked on compiler explorer and compiles to 1 instruction!
@@ -552,7 +547,7 @@ impl TryFrom<[f32;3]> for Unit32Vec3{
}
*/
pub type Planar64TryFromFloatError=FixedFromFloatError;
pub type Planar64TryFromFloatError=fixed_wide::fixed::FixedFromFloatError;
pub type Planar64=fixed_wide::types::I32F32;
pub type Planar64Vec3=linear_ops::types::Vector3<Planar64>;
pub type Planar64Mat3=linear_ops::types::Matrix3<Planar64>;
@@ -561,6 +556,8 @@ pub mod vec3{
pub use linear_ops::types::Vector3;
pub const MIN:Planar64Vec3=Planar64Vec3::new([Planar64::MIN;3]);
pub const MAX:Planar64Vec3=Planar64Vec3::new([Planar64::MAX;3]);
pub const ZERO:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO;3]);
pub const ZERO_2:linear_ops::types::Vector3<Fixed::<2,64>>=linear_ops::types::Vector3::new([Fixed::<2,64>::ZERO;3]);
pub const X:Planar64Vec3=Planar64Vec3::new([Planar64::ONE,Planar64::ZERO,Planar64::ZERO]);
pub const Y:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ONE,Planar64::ZERO]);
pub const Z:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ZERO,Planar64::ONE]);
@@ -569,10 +566,6 @@ pub mod vec3{
pub const NEG_Y:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::NEG_ONE,Planar64::ZERO]);
pub const NEG_Z:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ZERO,Planar64::NEG_ONE]);
pub const NEG_ONE:Planar64Vec3=Planar64Vec3::new([Planar64::NEG_ONE,Planar64::NEG_ONE,Planar64::NEG_ONE]);
// TODO: use #![feature(generic_const_items)] when stabilized https://github.com/rust-lang/rust/issues/113521
pub const fn zero<const N:usize,const F:usize>()->Vector3<Fixed<N,F>>{
Vector3::new([Fixed::ZERO;3])
}
#[inline]
pub const fn int(x:i32,y:i32,z:i32)->Planar64Vec3{
Planar64Vec3::new([Planar64::raw((x as i64)<<32),Planar64::raw((y as i64)<<32),Planar64::raw((z as i64)<<32)])
@@ -655,13 +648,13 @@ pub mod mat3{
}
}
#[derive(Clone,Copy,Debug,Default,Hash,Eq,PartialEq)]
#[derive(Clone,Copy,Default,Hash,Eq,PartialEq)]
pub struct Planar64Affine3{
pub matrix3:Planar64Mat3,//includes scale above 1
pub translation:Planar64Vec3,
}
impl Planar64Affine3{
pub const IDENTITY:Self=Self::new(mat3::identity(),vec3::zero());
pub const IDENTITY:Self=Self::new(mat3::identity(),vec3::ZERO);
#[inline]
pub const fn new(matrix3:Planar64Mat3,translation:Planar64Vec3)->Self{
Self{matrix3,translation}

View File

@@ -1,6 +1,6 @@
use std::collections::HashMap;
use crate::integer::{Planar64,Planar64Vec3,Planar64Affine3};
use crate::integer::{Planar64Vec3,Planar64Affine3};
use crate::gameplay_attributes;
pub type TextureCoordinate=glam::Vec2;
@@ -168,11 +168,6 @@ impl MeshBuilder{
}
}
pub fn acquire_pos_id(&mut self,pos:Planar64Vec3)->PositionId{
// Truncate the 16 least-significant (most precise) bits of the vertex positions.
// This allows the normal vectors to exactly represent the face.
// Remove this in Mesh V2
const MASK:Planar64=Planar64::raw(!((1<<16)-1));
let pos=pos.map(|c|c&MASK);
*self.pos_id_from.entry(pos).or_insert_with(||{
let pos_id=PositionId::new(self.unique_pos.len() as u32);
self.unique_pos.push(pos);
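
For context on the MASK above: Planar64 is the I32F32 alias seen elsewhere in this diff, i.e. a 32.32 fixed-point value stored in an i64, so clearing the low 16 bits snaps every coordinate down onto a 1/65536-stud grid (rounding toward negative infinity for negative values). A standalone sketch of the same masking on raw i64 bits, under that layout assumption:

// 32.32 fixed point: upper 32 bits integer part, lower 32 bits fraction.
const FRAC_BITS:u32=32;
// Clear the 16 least-significant fraction bits, as acquire_pos_id does.
const MASK:i64=!((1i64<<16)-1);

fn truncate(raw:i64)->i64{
    raw&MASK
}

fn main(){
    // 1.5 studs = 0x0000_0001_8000_0000 is already on the grid and survives unchanged.
    let one_and_a_half=3i64<<(FRAC_BITS-1);
    assert_eq!(truncate(one_and_a_half),one_and_a_half);
    // A coordinate with sub-1/65536 noise in the low bits gets snapped down.
    assert_eq!(truncate(one_and_a_half+0x1234),one_and_a_half);
}
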

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_deferred_loader"
version = "0.5.1"
version = "0.5.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -10,7 +10,4 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }

View File

@@ -2,7 +2,7 @@ use std::collections::HashMap;
use crate::loader::Loader;
use crate::mesh::Meshes;
use crate::texture::{RenderConfigs,Texture};
use strafesnet_common::model::{MeshId,RenderConfig,RenderConfigId,TextureId};
use strafesnet_common::model::{Mesh,MeshId,RenderConfig,RenderConfigId,TextureId};
#[derive(Clone,Copy,Debug)]
pub enum LoadFailureMode{
@@ -45,7 +45,7 @@ impl<H:core::hash::Hash+Eq> RenderConfigDeferredLoader<H>{
pub fn into_indices(self)->impl Iterator<Item=H>{
self.render_config_id_from_asset_id.into_keys().flatten()
}
pub fn into_render_configs<'a,L:Loader<Resource=Texture,Index<'a>=H>+'a>(mut self,loader:&mut L,failure_mode:LoadFailureMode)->Result<RenderConfigs,L::Error>{
pub fn into_render_configs<L:Loader<Resource=Texture,Index=H>>(mut self,loader:&mut L,failure_mode:LoadFailureMode)->Result<RenderConfigs,L::Error>{
let mut sorted_textures=vec![None;self.texture_count as usize];
for (index_option,render_config_id) in self.render_config_id_from_asset_id{
let render_config=&mut self.render_configs[render_config_id.get() as usize];
@@ -93,7 +93,7 @@ impl<H:core::hash::Hash+Eq> MeshDeferredLoader<H>{
pub fn into_indices(self)->impl Iterator<Item=H>{
self.mesh_id_from_asset_id.into_keys()
}
pub fn into_meshes<'a,M:Clone,L:Loader<Resource=M,Index<'a>=H>+'a>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes<M>,L::Error>{
pub fn into_meshes<L:Loader<Resource=Mesh,Index=H>>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes,L::Error>{
let mut mesh_list=vec![None;self.mesh_id_from_asset_id.len()];
for (index,mesh_id) in self.mesh_id_from_asset_id{
let resource_result=loader.load(index);

View File

@@ -2,7 +2,7 @@ use std::error::Error;
pub trait Loader{
type Error:Error;
type Index<'a> where Self:'a;
type Index;
type Resource;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>;
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>;
}
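
A self-contained sketch of the lifetime-generic associated type shown above: a borrowed Index<'a> lets each load call accept a reference with its own lifetime, instead of threading one lifetime through the whole loader the way the TextureLoader<'a>(PhantomData<&'a ()>) pattern further down has to. The FakeLoader and its textures/ path format are illustrative only.

use std::convert::Infallible;
use std::error::Error;

trait Loader{
    type Error:Error;
    type Index<'a> where Self:'a;
    type Resource;
    fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>;
}

// Hypothetical loader that "loads" a resource path from a borrowed string key.
struct FakeLoader;
impl Loader for FakeLoader{
    type Error=Infallible;
    type Index<'a>=&'a str where Self:'a;
    type Resource=String;
    fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
        Ok(format!("textures/{index}.dds"))
    }
}

fn main(){
    let mut loader=FakeLoader;
    // The borrow passed to load only needs to live for this one call.
    let path=loader.load("9723987101").unwrap();
    assert_eq!(path,"textures/9723987101.dds");
}
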

View File

@@ -1,15 +1,15 @@
use strafesnet_common::model::MeshId;
use strafesnet_common::model::{Mesh,MeshId};
pub struct Meshes<M>{
meshes:Vec<Option<M>>,
pub struct Meshes{
meshes:Vec<Option<Mesh>>,
}
impl<M> Meshes<M>{
pub(crate) const fn new(meshes:Vec<Option<M>>)->Self{
impl Meshes{
pub(crate) const fn new(meshes:Vec<Option<Mesh>>)->Self{
Self{
meshes,
}
}
pub fn consume(self)->impl Iterator<Item=(MeshId,M)>{
pub fn consume(self)->impl Iterator<Item=(MeshId,Mesh)>{
self.meshes.into_iter().enumerate().filter_map(|(mesh_id,maybe_mesh)|
maybe_mesh.map(|mesh|(MeshId::new(mesh_id as u32),mesh))
)

View File

@@ -1,6 +1,6 @@
[package]
name = "fixed_wide"
version = "0.2.1"
version = "0.2.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -18,6 +18,3 @@ bnum = "0.13.0"
arrayvec = { version = "0.7.6", optional = true }
paste = "1.0.15"
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
[lints]
workspace = true

View File

@@ -1,8 +1,6 @@
use bnum::{BInt,cast::As};
const BNUM_DIGIT_WIDTH:usize=64;
#[derive(Clone,Copy,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
/// A Fixed point number for which multiply operations widen the bits in the output. (when the wide-mul feature is enabled)
/// N is the number of u64s to use
/// F is the number of fractional bits (always N*32 lol)
@@ -70,34 +68,6 @@ impl<const N:usize,const F:usize> Fixed<N,F>{
pub const fn midpoint(self,other:Self)->Self{
Self::from_bits(self.bits.midpoint(other.bits))
}
#[inline]
pub const fn min(self,other:Self)->Self{
Self::from_bits(self.bits.min(other.bits))
}
#[inline]
pub const fn max(self,other:Self)->Self{
Self::from_bits(self.bits.max(other.bits))
}
/// return the result of self*sign(other)
#[inline]
pub const fn mul_sign<const N1:usize,const F1:usize>(self,other:Fixed<N1,F1>)->Self{
if other.is_negative(){
Self::from_bits(self.bits.neg())
}else if other.is_zero(){
Fixed::ZERO
}else{
self
}
}
/// return the result of self/sign(other) (divide by zero does not change the sign)
#[inline]
pub const fn div_sign<const N1:usize,const F1:usize>(self,other:Fixed<N1,F1>)->Self{
if other.is_negative(){
Self::from_bits(self.bits.neg())
}else{
self
}
}
}
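
The mul_sign/div_sign helpers in this hunk are branch-only sign transfers: multiply (or divide) self by the sign of another value without a full fixed-point multiply, with div_sign leaving the value unchanged on a zero divisor. A small sketch of the same semantics on plain i64 (the real versions take a Fixed of any width as the sign argument):

// self*sign(other): negative flips, zero zeroes, positive keeps.
fn mul_sign(x:i64,other:i64)->i64{
    if other<0{-x}else if other==0{0}else{x}
}
// self/sign(other): a zero divisor does not change the sign.
fn div_sign(x:i64,other:i64)->i64{
    if other<0{-x}else{x}
}

fn main(){
    assert_eq!(mul_sign(5,-3),-5);
    assert_eq!(mul_sign(5,0),0);
    assert_eq!(div_sign(5,0),5);
    assert_eq!(div_sign(-5,-1),5);
}
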
impl<const F:usize> Fixed<1,F>{
/// My old code called this function everywhere so let's provide it
@@ -129,6 +99,28 @@ impl_from!(
i8,i16,i32,i64,i128,isize
);
impl<const N:usize,const F:usize,T> PartialEq<T> for Fixed<N,F>
where
T:Copy,
BInt::<N>:From<T>,
{
#[inline]
fn eq(&self,&other:&T)->bool{
self.bits.eq(&other.into())
}
}
impl<const N:usize,const F:usize,T> PartialOrd<T> for Fixed<N,F>
where
T:Copy,
BInt::<N>:From<T>,
{
#[inline]
fn partial_cmp(&self,&other:&T)->Option<std::cmp::Ordering>{
self.bits.partial_cmp(&other.into())
}
}
impl<const N:usize,const F:usize> std::ops::Neg for Fixed<N,F>{
type Output=Self;
#[inline]
@@ -241,11 +233,6 @@ impl FixedFromFloatError{
}
}
}
impl core::fmt::Display for FixedFromFloatError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
macro_rules! impl_from_float {
( $decode:ident, $input: ty, $mantissa_bits:expr ) => {
impl<const N:usize,const F:usize> TryFrom<$input> for Fixed<N,F>{
@@ -294,23 +281,6 @@ macro_rules! impl_from_float {
impl_from_float!(integer_decode_f32,f32,24);
impl_from_float!(integer_decode_f64,f64,53);
impl<const N:usize,const F:usize> core::fmt::Debug for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{
let integral=self.as_bits().unsigned_abs()>>F;
let fractional=self.as_bits().unsigned_abs()&((bnum::BUint::<N>::ONE<<F)-bnum::BUint::<N>::ONE);
let leading_zeroes=(fractional.leading_zeros() as usize).saturating_sub(N*BNUM_DIGIT_WIDTH-F)>>2;
if self.is_negative(){
core::write!(f,"-")?;
}
if fractional.is_zero(){
core::write!(f,"{integral:x}.{}","0".repeat(leading_zeroes))
}else{
core::write!(f,"{integral:x}.{}{fractional:x}","0".repeat(leading_zeroes))
}
}
}
impl<const N:usize,const F:usize> core::fmt::Display for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{
@@ -334,6 +304,16 @@ macro_rules! impl_additive_operator {
self.$method(other)
}
}
impl<const N:usize,const F:usize,U> core::ops::$trait<U> for $struct<N,F>
where
BInt::<N>:From<U>,
{
type Output = $output;
#[inline]
fn $method(self, other: U) -> Self::Output {
Self::from_bits(self.bits.$method(BInt::<N>::from(other).shl(F as u32)))
}
}
};
}
macro_rules! impl_additive_assign_operator {
@@ -344,6 +324,15 @@ macro_rules! impl_additive_assign_operator {
self.bits.$method(other.bits);
}
}
impl<const N:usize,const F:usize,U> core::ops::$trait<U> for $struct<N,F>
where
BInt::<N>:From<U>,
{
#[inline]
fn $method(&mut self, other: U) {
self.bits.$method(BInt::<N>::from(other).shl(F as u32));
}
}
};
}
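
The extra impl added inside the additive macros above lets a plain integer sit on the right-hand side of the operator by shifting it into fixed-point scale first (BInt::<N>::from(other).shl(F as u32)). A toy 32.32 version of that shift-then-add, assuming the I32F32 layout:

const F:u32=32;

// fixed + integer: the integer is promoted to fixed-point before the raw add.
fn add_int(fixed_bits:i64,rhs:i64)->i64{
    fixed_bits+(rhs<<F)
}

fn main(){
    let one_half=1i64<<(F-1);    // 0.5 in 32.32
    let sum=add_int(one_half,2); // 0.5+2
    assert_eq!(sum,5i64<<(F-1)); // 2.5 in 32.32
}
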
@@ -366,7 +355,7 @@ impl_additive_operator!( Fixed, BitXor, bitxor, Self );
// non-wide operators. The result is the same width as the inputs.
// This macro is not used in the default configuration.
#[expect(unused_macros)]
#[allow(unused_macros)]
macro_rules! impl_multiplicative_operator_not_const_generic {
( ($struct: ident, $trait: ident, $method: ident, $output: ty ), $width:expr ) => {
impl<const F:usize> core::ops::$trait for $struct<$width,F>{
@@ -545,7 +534,7 @@ impl_shift_operator!( Fixed, Shr, shr, Self );
// wide operators. The result width is the sum of the input widths, i.e. none of the multiplication
#[expect(unused_macros)]
#[allow(unused_macros)]
macro_rules! impl_wide_operators{
($lhs:expr,$rhs:expr)=>{
impl core::ops::Mul<Fixed<$rhs,{$rhs*32}>> for Fixed<$lhs,{$lhs*32}>{

View File

@@ -229,16 +229,3 @@ fn test_zeroes_deferred_division(){
])
);
}
#[test]
fn test_debug(){
assert_eq!(format!("{:?}",I32F32::EPSILON),"0.00000001");
assert_eq!(format!("{:?}",I32F32::ONE),"1.00000000");
assert_eq!(format!("{:?}",I32F32::TWO),"2.00000000");
assert_eq!(format!("{:?}",I32F32::MAX),"7fffffff.ffffffff");
assert_eq!(format!("{:?}",I32F32::try_from(core::f64::consts::PI).unwrap()),"3.243f6a88");
assert_eq!(format!("{:?}",I32F32::NEG_EPSILON),"-0.00000001");
assert_eq!(format!("{:?}",I32F32::NEG_ONE),"-1.00000000");
assert_eq!(format!("{:?}",I32F32::NEG_TWO),"-2.00000000");
assert_eq!(format!("{:?}",I32F32::MIN),"-80000000.00000000");
}
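
The custom Debug impl in this diff (and the test_debug above) prints the value as hexadecimal integral and fractional halves; for the I32F32 case that reduces to the integer part in hex, a dot, then the 32 fraction bits as eight zero-padded hex digits, which is why EPSILON (raw bits 1) renders as 0.00000001 and MIN as -80000000.00000000. A standalone sketch of that formatting on an i64, assuming the I32F32 layout:

// Format a 32.32 fixed-point i64 the way the custom Debug impl renders I32F32.
fn debug_i32f32(raw:i64)->String{
    let sign=if raw<0{"-"}else{""};
    let abs=raw.unsigned_abs();
    let integral=abs>>32;
    let fractional=abs&0xFFFF_FFFF;
    format!("{sign}{integral:x}.{fractional:08x}")
}

fn main(){
    assert_eq!(debug_i32f32(1),"0.00000001");        // EPSILON
    assert_eq!(debug_i32f32(1i64<<32),"1.00000000"); // ONE
    assert_eq!(debug_i32f32(i64::MIN),"-80000000.00000000");
    // pi's raw bits, matching the test's expected string.
    assert_eq!(debug_i32f32(0x3_243F_6A88),"3.243f6a88");
}
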

View File

@@ -20,6 +20,3 @@ paste = { version = "1.0.15", optional = true }
[dev-dependencies]
fixed_wide = { path = "../fixed_wide", registry = "strafesnet", features = ["wide-mul"] }
[lints]
workspace = true

View File

@@ -204,21 +204,13 @@ macro_rules! impl_matrix_named_fields_shape {
type Target=$struct_outer<Vector<$size_inner,T>>;
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr:*const [[T;$size_inner];$size_outer]=&self.array;
let ptr=ptr as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
unsafe{core::mem::transmute(&self.array)}
}
}
impl<T> core::ops::DerefMut for Matrix<$size_outer,$size_inner,T>{
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr:*mut [[T;$size_inner];$size_outer]=&mut self.array;
let ptr=ptr as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
unsafe{core::mem::transmute(&mut self.array)}
}
}
}
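
A minimal, self-contained version of the pointer-cast Deref in this hunk (type names here are illustrative): with #[repr(transparent)] the wrapper is layout-compatible with its single field, so the reference can be re-borrowed through a documented raw-pointer cast instead of core::mem::transmute, keeping the unsafe reasoning local and explicit.

use core::ops::Deref;

// Layout-compatible with its only field.
#[repr(transparent)]
struct Matrix2<T>{
    array:[[T;2];2],
}
// The deref target wraps the same array shape, also transparently.
#[repr(transparent)]
struct Rows<T>{
    rows:[[T;2];2],
}

impl<T> Deref for Matrix2<T>{
    type Target=Rows<T>;
    #[inline]
    fn deref(&self)->&Self::Target{
        // This cast is valid because both types are #[repr(transparent)]
        // over [[T;2];2], so size and alignment match exactly.
        let ptr:*const [[T;2];2]=&self.array;
        let ptr=ptr as *const Self::Target;
        // SAFETY: the pointer comes from a live reference and the layouts match.
        unsafe{&*ptr}
    }
}

fn main(){
    let m=Matrix2{array:[[1,2],[3,4]]};
    // Field access auto-derefs through the cast.
    assert_eq!(m.rows[1][0],3);
}
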

View File

@@ -330,21 +330,13 @@ macro_rules! impl_vector_named_fields {
type Target=$struct<T>;
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr:*const [T;$size]=&self.array;
let ptr=ptr as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
unsafe{core::mem::transmute(&self.array)}
}
}
impl<T> core::ops::DerefMut for Vector<$size,T>{
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr:*mut [T;$size]=&mut self.array;
let ptr=ptr as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
unsafe{core::mem::transmute(&mut self.array)}
}
}
}

View File

@@ -8,6 +8,3 @@ description = "Ratio operations using trait bounds for avoiding division like th
authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_rbx_loader"
version = "0.7.0"
version = "0.6.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -12,17 +12,13 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
bytemuck = "1.14.3"
glam = "0.30.0"
regex = { version = "1.11.3", default-features = false }
rbx_mesh = "0.5.0"
lazy-regex = "3.1.0"
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
rbx_mesh = "0.3.1"
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
roblox_emulator = { version = "0.5.1", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
rbx_binary = "2.0.1"
rbx_dom_weak = "4.1.0"
rbx_reflection = "6.1.0"
rbx_reflection_database = "2.0.2"
rbx_xml = "2.0.1"
[lints]
workspace = true
roblox_emulator = { version = "0.4.7", path = "../roblox_emulator", registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }

View File

@@ -1,245 +0,0 @@
use std::collections::HashSet;
use std::num::ParseIntError;
use strafesnet_common::gameplay_modes::{StageId,ModeId};
use strafesnet_common::integer::{FixedFromFloatError,Planar64TryFromFloatError};
/// A collection of errors which can be ignored at your peril
#[derive(Debug,Default)]
pub struct RecoverableErrors{
/// A basepart has an invalid / missing property.
pub basepart_property:Vec<InstancePath>,
/// A part has an unconvertable CFrame.
pub basepart_cframe:Vec<CFrameError>,
/// A part has an unconvertable Velocity.
pub basepart_velocity:Vec<Planar64ConvertError>,
/// A part has an invalid / missing property.
pub part_property:Vec<InstancePath>,
/// A part has an invalid shape.
pub part_shape:Vec<ShapeError>,
/// A meshpart has an invalid / missing property.
pub meshpart_property:Vec<InstancePath>,
/// A meshpart has no mesh.
pub meshpart_content:Vec<InstancePath>,
/// A basepart has an unsupported subclass.
pub unsupported_class:HashSet<String>,
/// A decal has an invalid / missing property.
pub decal_property:Vec<InstancePath>,
/// A decal has an invalid normal_id.
pub normal_id:Vec<NormalIdError>,
/// A texture has an invalid / missing property.
pub texture_property:Vec<InstancePath>,
/// A mode_id failed to parse.
pub mode_id_parse_int:Vec<ParseIntContext>,
/// There is a duplicate mode.
pub duplicate_mode:HashSet<ModeId>,
/// A mode_id failed to parse.
pub stage_id_parse_int:Vec<ParseIntContext>,
/// A Stage was duplicated leading to undefined behaviour.
pub duplicate_stage:HashSet<DuplicateStageError>,
/// A WormholeOut id failed to parse.
pub wormhole_out_id_parse_int:Vec<ParseIntContext>,
/// A WormholeOut was duplicated leading to undefined behaviour.
pub duplicate_wormhole_out:HashSet<u32>,
/// A WormholeIn id failed to parse.
pub wormhole_in_id_parse_int:Vec<ParseIntContext>,
/// A jump limit failed to parse.
pub jump_limit_parse_int:Vec<ParseIntContext>,
}
impl RecoverableErrors{
pub fn count(&self)->usize{
self.basepart_property.len()+
self.basepart_cframe.len()+
self.basepart_velocity.len()+
self.part_property.len()+
self.part_shape.len()+
self.meshpart_property.len()+
self.meshpart_content.len()+
self.unsupported_class.len()+
self.decal_property.len()+
self.normal_id.len()+
self.texture_property.len()+
self.mode_id_parse_int.len()+
self.duplicate_mode.len()+
self.stage_id_parse_int.len()+
self.duplicate_stage.len()+
self.wormhole_out_id_parse_int.len()+
self.duplicate_wormhole_out.len()+
self.wormhole_in_id_parse_int.len()+
self.jump_limit_parse_int.len()
}
}
fn write_comma_separated<T>(
f:&mut std::fmt::Formatter<'_>,
mut it:impl Iterator<Item=T>,
custom_write:impl Fn(&mut std::fmt::Formatter<'_>,T)->std::fmt::Result
)->std::fmt::Result{
if let Some(t)=it.next(){
custom_write(f,t)?;
for t in it{
write!(f,", ")?;
custom_write(f,t)?;
}
}
Ok(())
}
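
A quick usage sketch of the write_comma_separated helper above, wrapped in a toy Display impl (the error type and list contents are made up):

use std::fmt;

fn write_comma_separated<T>(
    f:&mut fmt::Formatter<'_>,
    mut it:impl Iterator<Item=T>,
    custom_write:impl Fn(&mut fmt::Formatter<'_>,T)->fmt::Result,
)->fmt::Result{
    if let Some(t)=it.next(){
        custom_write(f,t)?;
        for t in it{
            write!(f,", ")?;
            custom_write(f,t)?;
        }
    }
    Ok(())
}

struct MissingParts(Vec<String>);
impl fmt::Display for MissingParts{
    fn fmt(&self,f:&mut fmt::Formatter<'_>)->fmt::Result{
        write!(f,"The following Parts are missing a property: ")?;
        write_comma_separated(f,self.0.iter(),|f,path|write!(f,"{path}"))
    }
}

fn main(){
    let e=MissingParts(vec!["Workspace.Spawn1".into(),"Workspace.Trigger2".into()]);
    assert_eq!(
        e.to_string(),
        "The following Parts are missing a property: Workspace.Spawn1, Workspace.Trigger2",
    );
}
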
macro_rules! write_instance_path_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,InstancePath(path)|
write!(f,"{path}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_duplicate_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} duplicates: ")?;
write_comma_separated($f,$self.$field.iter(),|f,id|
write!(f,"{id}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_bespoke_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal,$path_field:ident,$error_field:ident)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,context|
write!(f,"{} ({})",context.$path_field,context.$error_field)
)?;
writeln!($f)?;
}
};
}
impl core::fmt::Display for RecoverableErrors{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write_instance_path_error!(f,self,basepart_property,"BasePart is","BaseParts are","missing a property");
write_bespoke_error!(f,self,basepart_cframe,"BasePart","BaseParts","CFrame float convert failed",path,error);
write_bespoke_error!(f,self,basepart_velocity,"BasePart","BaseParts","Velocity float convert failed",path,error);
write_instance_path_error!(f,self,part_property,"Part is","Parts are","missing a property");
write_bespoke_error!(f,self,part_shape,"Part","Parts","Shape is invalid",path,shape);
write_instance_path_error!(f,self,meshpart_property,"MeshPart is","MeshParts are","missing a property");
write_instance_path_error!(f,self,meshpart_content,"MeshPart has","MeshParts have","no mesh");
{
let len=self.unsupported_class.len();
if len!=0{
let plural=if len==1{"Class is"}else{"Classes are"};
write!(f,"The following {plural} not supported: ")?;
write_comma_separated(f,self.unsupported_class.iter(),|f,classname|write!(f,"{classname}"))?;
writeln!(f)?;
}
}
write_instance_path_error!(f,self,decal_property,"Decal is","Decals are","missing a property");
write_bespoke_error!(f,self,normal_id,"Decal","Decals","NormalId is invalid",path,normal_id);
write_instance_path_error!(f,self,texture_property,"Texture is","Textures are","missing a property");
write_bespoke_error!(f,self,mode_id_parse_int,"ModeId","ModeIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_mode,"ModeId has","ModeIds have");
write_bespoke_error!(f,self,stage_id_parse_int,"StageId","StageIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_stage,"StageId has","StageIds have");
write_bespoke_error!(f,self,wormhole_out_id_parse_int,"WormholeOutId","WormholeOutIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_wormhole_out,"WormholeOutId has","WormholeOutIds have");
write_bespoke_error!(f,self,wormhole_in_id_parse_int,"WormholeInId","WormholeInIds","failed to parse",context,error);
write_bespoke_error!(f,self,jump_limit_parse_int,"jump limit","jump limits","failed to parse",context,error);
Ok(())
}
}
/// A Decal was missing required properties
#[derive(Debug)]
pub struct InstancePath(pub String);
impl core::fmt::Display for InstancePath{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
self.0.fmt(f)
}
}
impl InstancePath{
pub fn new(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->InstancePath{
let mut names:Vec<_>=core::iter::successors(
Some(instance),
|i|dom.get_by_ref(i.parent())
).map(
|i|i.name.as_str()
).collect();
// discard the name of the root object
names.pop();
names.reverse();
InstancePath(names.join("."))
}
}
#[derive(Debug)]
pub struct ParseIntContext{
pub context:String,
pub error:ParseIntError,
}
impl ParseIntContext{
pub fn parse<T:core::str::FromStr<Err=ParseIntError>>(input:&str)->Result<T,Self>{
input.parse().map_err(|error|ParseIntContext{
context:input.to_owned(),
error,
})
}
}
#[derive(Debug)]
pub struct NormalIdError{
pub path:InstancePath,
pub normal_id:u32,
}
#[derive(Debug)]
pub struct ShapeError{
pub path:InstancePath,
pub shape:u32,
}
#[derive(Debug)]
pub enum CFrameErrorType{
ZeroDeterminant,
Convert(FixedFromFloatError),
}
impl core::fmt::Display for CFrameErrorType{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
#[derive(Debug)]
pub struct CFrameError{
pub path:InstancePath,
pub error:CFrameErrorType,
}
#[derive(Debug)]
pub struct Planar64ConvertError{
pub path:InstancePath,
pub error:Planar64TryFromFloatError,
}
#[derive(Debug,Hash,Eq,PartialEq)]
pub struct DuplicateStageError{
pub mode_id:ModeId,
pub stage_id:StageId,
}
impl core::fmt::Display for DuplicateStageError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{}-Spawn{}",self.mode_id,self.stage_id.get())
}
}
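
InstancePath::new above builds the dotted path by walking parent references with core::iter::successors, dropping the root object's name, and reversing. A toy version of the same walk over a flat parent-index arena (rbx_dom_weak is not used here):

// Toy instance arena: each node stores a name and an optional parent index.
struct Inst{
    name:&'static str,
    parent:Option<usize>,
}

fn instance_path(arena:&[Inst],leaf:usize)->String{
    let mut names:Vec<_>=core::iter::successors(
        Some(leaf),
        |&i|arena[i].parent,
    ).map(|i|arena[i].name).collect();
    // discard the name of the root object, then put ancestors first
    names.pop();
    names.reverse();
    names.join(".")
}

fn main(){
    let arena=[
        Inst{name:"game",parent:None},// root, dropped from the path
        Inst{name:"Workspace",parent:Some(0)},
        Inst{name:"Map",parent:Some(1)},
        Inst{name:"Spawn1",parent:Some(2)},
    ];
    assert_eq!(instance_path(&arena,3),"Workspace.Map.Spawn1");
}
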

View File

@@ -1,18 +1,12 @@
use std::io::Read;
use rbx_dom_weak::WeakDom;
use roblox_emulator::context::Context;
use strafesnet_common::map::CompleteMap;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
pub use error::RecoverableErrors;
pub use roblox_emulator::runner::Error as RunnerError;
mod rbx;
mod mesh;
mod error;
mod union;
pub mod loader;
pub mod primitives;
mod primitives;
pub mod data{
pub struct RobloxMeshBytes(Vec<u8>);
@@ -33,7 +27,13 @@ impl Model{
fn new(dom:WeakDom)->Self{
Self{dom}
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
pub fn into_place(self)->Place{
let Self{mut dom}=self;
let context=roblox_emulator::context::Context::from_mut(&mut dom);
let services=context.convert_into_place();
Place{dom,services}
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
to_snf(self,failure_mode)
}
}
@@ -44,43 +44,36 @@ impl AsRef<WeakDom> for Model{
}
pub struct Place{
context:Context,
dom:WeakDom,
services:roblox_emulator::context::Services,
}
impl Place{
pub fn new(dom:WeakDom)->Result<Self,roblox_emulator::context::ServicesError>{
let context=Context::from_place(dom)?;
Ok(Self{
context,
pub fn new(dom:WeakDom)->Option<Self>{
let context=roblox_emulator::context::Context::from_ref(&dom);
Some(Self{
services:context.find_services()?,
dom,
})
}
pub fn run_scripts(&mut self)->Result<Vec<RunnerError>,RunnerError>{
let Place{context}=self;
let runner=roblox_emulator::runner::Runner::new()?;
pub fn run_scripts(&mut self){
let Place{dom,services}=self;
let runner=roblox_emulator::runner::Runner::new().unwrap();
let context=roblox_emulator::context::Context::from_mut(dom);
let scripts=context.scripts();
let runnable=runner.runnable_context(context)?;
let mut errors=Vec::new();
let runnable=runner.runnable_context_with_services(context,services).unwrap();
for script in scripts{
if let Err(e)=runnable.run_script(script){
errors.push(e);
println!("runner error: {e}");
}
}
Ok(errors)
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
to_snf(self,failure_mode)
}
}
impl AsRef<WeakDom> for Place{
fn as_ref(&self)->&WeakDom{
self.context.as_ref()
}
}
impl From<Model> for Place{
fn from(model:Model)->Self{
let context=Context::from_model(model.dom);
Self{
context,
}
&self.dom
}
}
@@ -101,9 +94,9 @@ impl std::error::Error for ReadError{}
pub fn read<R:Read>(input:R)->Result<Model,ReadError>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf).map_err(ReadError::Io)?;
match peek.get(0..8){
Some(b"<roblox!")=>rbx_binary::from_reader(buf).map(Model::new).map_err(ReadError::RbxBinary),
Some(b"<roblox ")=>rbx_xml::from_reader_default(buf).map(Model::new).map_err(ReadError::RbxXml),
match &peek[0..8]{
b"<roblox!"=>rbx_binary::from_reader(buf).map(Model::new).map_err(ReadError::RbxBinary),
b"<roblox "=>rbx_xml::from_reader_default(buf).map(Model::new).map_err(ReadError::RbxXml),
_=>Err(ReadError::UnknownFileFormat),
}
}
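
The read() function above sniffs the container format from the first eight bytes without consuming them: fill_buf peeks the reader, and the byte-string patterns distinguish the binary and XML Roblox headers. A standalone sketch of the same peek-and-match (the returned labels are made up; the real function hands the reader to rbx_binary or rbx_xml):

use std::io::{BufRead,BufReader,Read};

fn sniff<R:Read>(input:R)->&'static str{
    let mut buf=BufReader::new(input);
    let peek=buf.fill_buf().unwrap_or(&[]);
    match peek.get(0..8){
        Some(b"<roblox!")=>"binary place/model",
        Some(b"<roblox ")=>"xml place/model",
        _=>"unknown",
    }
}

fn main(){
    assert_eq!(sniff(&b"<roblox!\x89\xff\x0d\x0a"[..]),"binary place/model");
    assert_eq!(sniff(&b"<roblox xmlns=...>"[..]),"xml place/model");
    assert_eq!(sniff(&b"PK\x03\x04"[..]),"unknown");
}
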
@@ -130,7 +123,7 @@ impl From<loader::MeshError> for LoadError{
}
}
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
let dom=dom.as_ref();
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
@@ -150,5 +143,7 @@ fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<(Complet
let mut texture_loader=loader::TextureLoader::new();
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;
Ok(map_step2.add_render_configs_and_textures(render_configs))
let map=map_step2.add_render_configs_and_textures(render_configs);
Ok(map)
}

View File

@@ -4,12 +4,7 @@ use strafesnet_common::model::Mesh;
use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
use crate::data::RobloxMeshBytes;
use crate::rbx::RobloxPartDescription;
// disallow non-static lifetimes
fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
}
use crate::rbx::RobloxFaceTextureDescription;
fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::Error>{
let mut file=std::fs::File::open(path)?;
@@ -18,6 +13,7 @@ fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::E
Ok(data)
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -40,17 +36,17 @@ impl From<RobloxAssetIdParseErr> for TextureError{
}
}
pub struct TextureLoader;
impl TextureLoader{
pub struct TextureLoader<'a>(std::marker::PhantomData<&'a ()>);
impl TextureLoader<'_>{
pub fn new()->Self{
Self
Self(std::marker::PhantomData)
}
}
impl Loader for TextureLoader{
impl<'a> Loader for TextureLoader<'a>{
type Error=TextureError;
type Index<'a>=&'a str;
type Index=&'a str;
type Resource=Texture;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
let RobloxAssetId(asset_id)=index.parse()?;
let file_name=format!("textures/{}.dds",asset_id);
let data=read_entire_file(file_name)?;
@@ -58,6 +54,7 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -107,7 +104,7 @@ pub enum MeshType<'a>{
mesh_data:&'a [u8],
physics_data:&'a [u8],
size_float_bits:[u32;3],
part_texture_description:RobloxPartDescription,
part_texture_description:[Option<RobloxFaceTextureDescription>;6],
},
}
#[derive(Hash,Eq,PartialEq)]
@@ -116,7 +113,7 @@ pub struct MeshIndex<'a>{
content:&'a str,
}
impl MeshIndex<'_>{
pub fn file_mesh(content:&str)->MeshIndex<'_>{
pub fn file_mesh(content:&str)->MeshIndex{
MeshIndex{
mesh_type:MeshType::FileMesh,
content,
@@ -127,7 +124,7 @@ impl MeshIndex<'_>{
mesh_data:&'a [u8],
physics_data:&'a [u8],
size:&rbx_dom_weak::types::Vector3,
part_texture_description:RobloxPartDescription,
part_texture_description:crate::rbx::RobloxPartDescription,
)->MeshIndex<'a>{
MeshIndex{
mesh_type:MeshType::Union{
@@ -141,23 +138,17 @@ impl MeshIndex<'_>{
}
}
#[derive(Clone)]
pub struct MeshWithSize{
pub(crate) mesh:Mesh,
pub(crate) size:strafesnet_common::integer::Planar64Vec3,
}
pub struct MeshLoader;
impl MeshLoader{
pub struct MeshLoader<'a>(std::marker::PhantomData<&'a ()>);
impl MeshLoader<'_>{
pub fn new()->Self{
Self
Self(std::marker::PhantomData)
}
}
impl Loader for MeshLoader{
impl<'a> Loader for MeshLoader<'a>{
type Error=MeshError;
type Index<'a>=MeshIndex<'a>;
type Resource=MeshWithSize;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
type Index=MeshIndex<'a>;
type Resource=Mesh;
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
let mesh=match index.mesh_type{
MeshType::FileMesh=>{
let RobloxAssetId(asset_id)=index.content.parse()?;
@@ -180,12 +171,12 @@ impl Loader for MeshLoader{
return Err(MeshError::MissingInstance);
};
if physics_data.is_empty(){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(&static_ustr("PhysicsData")){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get("PhysicsData"){
physics_data=data.as_ref();
}
}
if mesh_data.is_empty(){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(&static_ustr("MeshData")){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get("MeshData"){
mesh_data=data.as_ref();
}
}

View File

@@ -1,14 +1,12 @@
use std::collections::HashMap;
use rbx_mesh::mesh::{Vertex2,Vertex2Truncated};
use strafesnet_common::aabb::Aabb;
use strafesnet_common::integer::vec3;
use strafesnet_common::model::{self,ColorId,IndexedVertex,PolygonGroup,PolygonList,RenderConfigId,VertexId};
use crate::loader::MeshWithSize;
use strafesnet_common::{integer::vec3,model::{self,ColorId,IndexedVertex,NormalId,PolygonGroup,PolygonList,PositionId,RenderConfigId,TextureCoordinateId,VertexId}};
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
RbxMesh(rbx_mesh::mesh::Error)
}
impl std::fmt::Display for Error{
@@ -18,46 +16,69 @@ impl std::fmt::Display for Error{
}
impl std::error::Error for Error{}
fn ingest_vertices2(
fn ingest_vertices2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireColorId,
AcquireVertexId,
>(
vertices:Vec<Vertex2>,
mb:&mut model::MeshBuilder,
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
acquire_color_id:&mut AcquireColorId,
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireColorId:FnMut([f32;4])->ColorId,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
//while also inserting unique entries into the lists as it goes
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32))),
};
mb.acquire_vertex_id(vertex)
}
))).collect()
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:acquire_color_id(vertex.color.map(|f|f as f32/255.0f32))
}),
))).collect::<Result<_,_>>()?)
}
fn ingest_vertices_truncated2(
fn ingest_vertices_truncated2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireVertexId,
>(
vertices:Vec<Vertex2Truncated>,
mb:&mut model::MeshBuilder,
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
static_color_id:ColorId,//pick one color and fill everything with it
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
//while also inserting unique entries into the lists as it goes
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:static_color_id,
};
mb.acquire_vertex_id(vertex)
}
))).collect()
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:static_color_id
}),
))).collect::<Result<_,_>>()?)
}
fn ingest_faces2_lods3(
@@ -68,80 +89,129 @@ fn ingest_faces2_lods3(
){
//faces have to be split into polygon groups based on lod
polygon_groups.extend(lods.windows(2).map(|lod_pair|
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().filter_map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
Some(vec![*vertex_id_map.get(&v0)?,*vertex_id_map.get(&v1)?,*vertex_id_map.get(&v2)?])
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
vec![vertex_id_map[&v0],vertex_id_map[&v1],vertex_id_map[&v2]]
).collect()))
))
}
pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithSize,Error>{
pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<model::Mesh,Error>{
//generate that mesh boi
let mut unique_pos=Vec::new();
let mut pos_id_from=HashMap::new();
let mut unique_tex=Vec::new();
let mut tex_id_from=HashMap::new();
let mut unique_normal=Vec::new();
let mut normal_id_from=HashMap::new();
let mut unique_color=Vec::new();
let mut color_id_from=HashMap::new();
let mut unique_vertices=Vec::new();
let mut vertex_id_from=HashMap::new();
let mut polygon_groups=Vec::new();
let mut mb=model::MeshBuilder::new();
let mut acquire_pos_id=|pos|{
let p=vec3::try_from_f32_array(pos).map_err(Error::Planar64Vec3)?;
Ok(PositionId::new(*pos_id_from.entry(p).or_insert_with(||{
let pos_id=unique_pos.len();
unique_pos.push(p);
pos_id
}) as u32))
};
let mut acquire_tex_id=|tex|{
let h=bytemuck::cast::<[f32;2],[u32;2]>(tex);
TextureCoordinateId::new(*tex_id_from.entry(h).or_insert_with(||{
let tex_id=unique_tex.len();
unique_tex.push(glam::Vec2::from_array(tex));
tex_id
}) as u32)
};
let mut acquire_normal_id=|normal|{
let n=vec3::try_from_f32_array(normal).map_err(Error::Planar64Vec3)?;
Ok(NormalId::new(*normal_id_from.entry(n).or_insert_with(||{
let normal_id=unique_normal.len();
unique_normal.push(n);
normal_id
}) as u32))
};
let mut acquire_color_id=|color|{
let h=bytemuck::cast::<[f32;4],[u32;4]>(color);
ColorId::new(*color_id_from.entry(h).or_insert_with(||{
let color_id=unique_color.len();
unique_color.push(glam::Vec4::from_array(color));
color_id
}) as u32)
};
let mut acquire_vertex_id=|vertex:IndexedVertex|{
VertexId::new(*vertex_id_from.entry(vertex.clone()).or_insert_with(||{
let vertex_id=unique_vertices.len();
unique_vertices.push(vertex);
vertex_id
}) as u32)
};
match rbx_mesh::read_versioned(roblox_mesh_bytes.cursor()).map_err(Error::RbxMesh)?{
rbx_mesh::mesh::Mesh::V1(mesh)=>{
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).filter_map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::vec2(vertex.tex[0],vertex.tex[1])),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:color_id,
};
Some(mb.acquire_vertex_id(vertex))
};
Some(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect())));
rbx_mesh::mesh::VersionedMesh::Version1(mesh)=>{
let color_id=acquire_color_id([1.0f32;4]);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|Ok(acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id([vertex.tex[0],vertex.tex[1]]),
normal:acquire_normal_id(vertex.norm)?,
color:color_id,
}));
Ok(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect::<Result<_,_>>()?)));
},
rbx_mesh::mesh::Mesh::V2(mesh)=>{
rbx_mesh::mesh::VersionedMesh::Version2(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
//pick white and make all the vertices white
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
//one big happy group for all the faces
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().filter_map(|face|
Some(vec![*vertex_id_map.get(&face.0)?,*vertex_id_map.get(&face.1)?,*vertex_id_map.get(&face.2)?])
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|face|
vec![vertex_id_map[&face.0],vertex_id_map[&face.1],vertex_id_map[&face.2]]
).collect())));
},
rbx_mesh::mesh::Mesh::V3(mesh)=>{
rbx_mesh::mesh::VersionedMesh::Version3(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::Mesh::V4(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
rbx_mesh::mesh::VersionedMesh::Version4(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::Mesh::V5(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
rbx_mesh::mesh::VersionedMesh::Version5(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
}
let mesh=mb.build(
Ok(model::Mesh{
unique_pos,
unique_normal,
unique_tex,
unique_color,
unique_vertices,
polygon_groups,
//these should probably be moved to the model...
//but what if models want to use the same texture
vec![model::IndexedGraphicsGroup{
graphics_groups:vec![model::IndexedGraphicsGroup{
render:RenderConfigId::new(0),
//the lowest lod is highest quality
groups:vec![model::PolygonGroupId::new(0)]
}],
//disable physics
Vec::new(),
);
let mut aabb=Aabb::default();
for &point in &mesh.unique_pos{
aabb.grow(point);
}
Ok(MeshWithSize{mesh,size:aabb.size()})
physics_groups:Vec::new(),
})
}
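
Every ingest_* helper above reduces to the same dedup pattern: hash each incoming attribute, reuse its id when it was seen before, otherwise push it and record a fresh id, while building a map from the mesh file's vertex indices to the deduplicated ones. A stripped-down sketch of that pattern (the u32 id newtypes and the MeshBuilder wrapper are omitted):

use std::collections::HashMap;

// Deduplicate positions and return (unique list, old_index -> unique_index).
fn dedup_positions(vertices:&[[i64;3]])->(Vec<[i64;3]>,HashMap<usize,usize>){
    let mut unique_pos=Vec::new();
    let mut pos_id_from=HashMap::new();
    let mut id_map=HashMap::new();
    for (old_index,&pos) in vertices.iter().enumerate(){
        let pos_id=*pos_id_from.entry(pos).or_insert_with(||{
            let pos_id=unique_pos.len();
            unique_pos.push(pos);
            pos_id
        });
        id_map.insert(old_index,pos_id);
    }
    (unique_pos,id_map)
}

fn main(){
    let verts:[[i64;3];4]=[[0,0,0],[1,0,0],[0,0,0],[0,1,0]];
    let (unique,map)=dedup_positions(&verts);
    assert_eq!(unique.len(),3);  // the duplicate [0,0,0] collapses
    assert_eq!(map[&0],map[&2]); // both old indices resolve to the same id
}
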

View File

@@ -1,6 +1,5 @@
use crate::rbx::{RobloxPartDescription,RobloxWedgeDescription,RobloxCornerWedgeDescription};
use strafesnet_common::model::{Color4,TextureCoordinate,Mesh,MeshBuilder,IndexedGraphicsGroup,IndexedPhysicsGroup,IndexedVertex,PolygonGroupId,PolygonGroup,PolygonList,PositionId,TextureCoordinateId,NormalId,ColorId,VertexId,RenderConfigId};
use strafesnet_common::integer::{vec3,Planar64,Planar64Vec3};
use strafesnet_common::model::{Color4,TextureCoordinate,Mesh,IndexedGraphicsGroup,IndexedPhysicsGroup,IndexedVertex,PolygonGroupId,PolygonGroup,PolygonList,PositionId,TextureCoordinateId,NormalId,ColorId,VertexId,RenderConfigId};
use strafesnet_common::integer::{vec3,Planar64Vec3};
#[derive(Debug)]
pub enum Primitives{
@@ -10,22 +9,7 @@ pub enum Primitives{
Wedge,
CornerWedge,
}
#[derive(Debug)]
pub struct PrimitivesError;
impl TryFrom<u32> for Primitives{
type Error=PrimitivesError;
fn try_from(value:u32)->Result<Self,Self::Error>{
match value{
0=>Ok(Primitives::Sphere),
1=>Ok(Primitives::Cube),
2=>Ok(Primitives::Cylinder),
3=>Ok(Primitives::Wedge),
4=>Ok(Primitives::CornerWedge),
_=>Err(PrimitivesError),
}
}
}
#[derive(Clone,Copy,Hash,PartialEq,Eq)]
#[derive(Hash,PartialEq,Eq)]
pub enum CubeFace{
Right,
Top,
@@ -34,29 +18,13 @@ pub enum CubeFace{
Bottom,
Front,
}
#[derive(Debug)]
pub struct CubeFaceError;
impl TryFrom<u32> for CubeFace{
type Error=CubeFaceError;
fn try_from(value:u32)->Result<Self,Self::Error>{
match value{
0=>Ok(CubeFace::Right),
1=>Ok(CubeFace::Top),
2=>Ok(CubeFace::Back),
3=>Ok(CubeFace::Left),
4=>Ok(CubeFace::Bottom),
5=>Ok(CubeFace::Front),
_=>Err(CubeFaceError),
}
}
}
const CUBE_DEFAULT_TEXTURE_COORDS:[TextureCoordinate;4]=[
TextureCoordinate::new(0.0,0.0),
TextureCoordinate::new(1.0,0.0),
TextureCoordinate::new(1.0,1.0),
TextureCoordinate::new(0.0,1.0),
];
pub const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
vec3::int(-1,-1, 1),//0 left bottom back
vec3::int( 1,-1, 1),//1 right bottom back
vec3::int( 1, 1, 1),//2 right top back
@@ -66,7 +34,7 @@ pub const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
vec3::int( 1,-1,-1),//6 right bottom front
vec3::int(-1,-1,-1),//7 left bottom front
];
pub const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
vec3::int( 1, 0, 0),//CubeFace::Right
vec3::int( 0, 1, 0),//CubeFace::Top
vec3::int( 0, 0, 1),//CubeFace::Back
@@ -75,36 +43,103 @@ pub const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
vec3::int( 0, 0,-1),//CubeFace::Front
];
pub struct CubeFaceDescription([FaceDescription;Self::FACES]);
#[derive(Hash,PartialEq,Eq)]
pub enum WedgeFace{
Right,
TopFront,
Back,
Left,
Bottom,
}
const WEDGE_DEFAULT_NORMALS:[Planar64Vec3;5]=[
vec3::int( 1, 0, 0),//Wedge::Right
vec3::int( 0, 1,-1),//Wedge::TopFront
vec3::int( 0, 0, 1),//Wedge::Back
vec3::int(-1, 0, 0),//Wedge::Left
vec3::int( 0,-1, 0),//Wedge::Bottom
];
/*
local cornerWedgeVerticies = {
Vector3.new(-1/2,-1/2,-1/2),7
Vector3.new(-1/2,-1/2, 1/2),0
Vector3.new( 1/2,-1/2,-1/2),6
Vector3.new( 1/2,-1/2, 1/2),1
Vector3.new( 1/2, 1/2,-1/2),5
}
*/
#[derive(Hash,PartialEq,Eq)]
pub enum CornerWedgeFace{
Right,
TopBack,
TopLeft,
Bottom,
Front,
}
const CORNERWEDGE_DEFAULT_NORMALS:[Planar64Vec3;5]=[
vec3::int( 1, 0, 0),//CornerWedge::Right
vec3::int( 0, 1, 1),//CornerWedge::BackTop
vec3::int(-1, 1, 0),//CornerWedge::LeftTop
vec3::int( 0,-1, 0),//CornerWedge::Bottom
vec3::int( 0, 0,-1),//CornerWedge::Front
];
#[derive(Default)]
pub struct CubeFaceDescription([Option<FaceDescription>;6]);
impl CubeFaceDescription{
pub const FACES:usize=6;
pub fn new(RobloxPartDescription(part_description):RobloxPartDescription,textureless_render_id:RenderConfigId)->Self{
Self(part_description.map(|face_description|match face_description{
Some(roblox_texture_transform)=>roblox_texture_transform.to_face_description(),
None=>FaceDescription::new_with_render_id(textureless_render_id),
}))
pub fn insert(&mut self,index:CubeFace,value:FaceDescription){
self.0[index as usize]=Some(value);
}
pub fn pairs(self)->impl Iterator<Item=(usize,FaceDescription)>{
self.0.into_iter().enumerate().filter_map(|(i,v)|v.map(|u|(i,u)))
}
}
pub struct WedgeFaceDescription([FaceDescription;Self::FACES]);
pub fn unit_cube(render:RenderConfigId)->Mesh{
let mut t=CubeFaceDescription::default();
t.insert(CubeFace::Right,FaceDescription::new_with_render_id(render));
t.insert(CubeFace::Top,FaceDescription::new_with_render_id(render));
t.insert(CubeFace::Back,FaceDescription::new_with_render_id(render));
t.insert(CubeFace::Left,FaceDescription::new_with_render_id(render));
t.insert(CubeFace::Bottom,FaceDescription::new_with_render_id(render));
t.insert(CubeFace::Front,FaceDescription::new_with_render_id(render));
generate_partial_unit_cube(t)
}
#[derive(Default)]
pub struct WedgeFaceDescription([Option<FaceDescription>;5]);
impl WedgeFaceDescription{
pub const FACES:usize=5;
pub fn new(RobloxWedgeDescription(part_description):RobloxWedgeDescription,textureless_render_id:RenderConfigId)->Self{
Self(part_description.map(|face_description|match face_description{
Some(roblox_texture_transform)=>roblox_texture_transform.to_face_description(),
None=>FaceDescription::new_with_render_id(textureless_render_id),
}))
pub fn insert(&mut self,index:WedgeFace,value:FaceDescription){
self.0[index as usize]=Some(value);
}
pub fn pairs(self)->std::iter::FilterMap<std::iter::Enumerate<std::array::IntoIter<Option<FaceDescription>,5>>,impl FnMut((usize,Option<FaceDescription>))->Option<(usize,FaceDescription)>>{
self.0.into_iter().enumerate().filter_map(|v|v.1.map(|u|(v.0,u)))
}
}
pub struct CornerWedgeFaceDescription([FaceDescription;Self::FACES]);
// pub fn unit_wedge(render:RenderConfigId)->Mesh{
// let mut t=WedgeFaceDescription::default();
// t.insert(WedgeFace::Right,FaceDescription::new_with_render_id(render));
// t.insert(WedgeFace::TopFront,FaceDescription::new_with_render_id(render));
// t.insert(WedgeFace::Back,FaceDescription::new_with_render_id(render));
// t.insert(WedgeFace::Left,FaceDescription::new_with_render_id(render));
// t.insert(WedgeFace::Bottom,FaceDescription::new_with_render_id(render));
// generate_partial_unit_wedge(t)
// }
#[derive(Default)]
pub struct CornerWedgeFaceDescription([Option<FaceDescription>;5]);
impl CornerWedgeFaceDescription{
pub const FACES:usize=5;
pub fn new(RobloxCornerWedgeDescription(part_description):RobloxCornerWedgeDescription,textureless_render_id:RenderConfigId)->Self{
Self(part_description.map(|face_description|match face_description{
Some(roblox_texture_transform)=>roblox_texture_transform.to_face_description(),
None=>FaceDescription::new_with_render_id(textureless_render_id),
}))
pub fn insert(&mut self,index:CornerWedgeFace,value:FaceDescription){
self.0[index as usize]=Some(value);
}
pub fn pairs(self)->std::iter::FilterMap<std::iter::Enumerate<std::array::IntoIter<Option<FaceDescription>,5>>,impl FnMut((usize,Option<FaceDescription>))->Option<(usize,FaceDescription)>>{
self.0.into_iter().enumerate().filter_map(|v|v.1.map(|u|(v.0,u)))
}
}
// pub fn unit_cornerwedge(render:RenderConfigId)->Mesh{
// let mut t=CornerWedgeFaceDescription::default();
// t.insert(CornerWedgeFace::Right,FaceDescription::new_with_render_id(render));
// t.insert(CornerWedgeFace::TopBack,FaceDescription::new_with_render_id(render));
// t.insert(CornerWedgeFace::TopLeft,FaceDescription::new_with_render_id(render));
// t.insert(CornerWedgeFace::Bottom,FaceDescription::new_with_render_id(render));
// t.insert(CornerWedgeFace::Front,FaceDescription::new_with_render_id(render));
// generate_partial_unit_cornerwedge(t)
// }
#[derive(Clone)]
pub struct FaceDescription{
@@ -113,7 +148,7 @@ pub struct FaceDescription{
pub color:Color4,
}
impl FaceDescription{
pub fn new_with_render_id(render:RenderConfigId)->Self{
pub fn new_with_render_id(render:RenderConfigId)->Self {
Self{
render,
transform:glam::Affine2::IDENTITY,
@@ -121,51 +156,51 @@ impl FaceDescription{
}
}
}
pub const CUBE_DEFAULT_POLYS:[[[u32;2];4];6]=[
pub fn generate_partial_unit_cube(face_descriptions:CubeFaceDescription)->Mesh{
const CUBE_DEFAULT_POLYS:[[[u32;3];4];6]=[
// right (1, 0, 0)
[
[6,2],//[vertex,tex]
[5,1],
[2,0],
[1,3],
[6,2,0],//[vertex,tex,norm]
[5,1,0],
[2,0,0],
[1,3,0],
],
// top (0, 1, 0)
[
[5,3],
[4,2],
[3,1],
[2,0],
[5,3,1],
[4,2,1],
[3,1,1],
[2,0,1],
],
// back (0, 0, 1)
[
[0,3],
[1,2],
[2,1],
[3,0],
[0,3,2],
[1,2,2],
[2,1,2],
[3,0,2],
],
// left (-1, 0, 0)
[
[0,2],
[3,1],
[4,0],
[7,3],
[0,2,3],
[3,1,3],
[4,0,3],
[7,3,3],
],
// bottom (0,-1, 0)
[
[1,1],
[0,0],
[7,3],
[6,2],
[1,1,4],
[0,0,4],
[7,3,4],
[6,2,4],
],
// front (0, 0,-1)
[
[4,1],
[5,0],
[6,3],
[7,2],
[4,1,5],
[5,0,5],
[6,3,5],
[7,2,5],
],
];
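
The [vertex,tex] pairs above index into CUBE_DEFAULT_VERTICES, and the face's position in the outer array selects the matching entry of CUBE_DEFAULT_NORMALS (which is why the explicit norm index could be dropped). A small integer-only check that each listed face lies on the plane its normal describes; vertices 3 and 4 are not visible in this hunk and are reconstructed as the remaining left-top corners, which the face lists themselves confirm:

const VERTICES:[[i32;3];8]=[
    [-1,-1, 1],[ 1,-1, 1],[ 1, 1, 1],[-1, 1, 1],
    [-1, 1,-1],[ 1, 1,-1],[ 1,-1,-1],[-1,-1,-1],
];
const NORMALS:[[i32;3];6]=[
    [ 1, 0, 0],[ 0, 1, 0],[ 0, 0, 1],[-1, 0, 0],[ 0,-1, 0],[ 0, 0,-1],
];
// [vertex,tex] pairs per face, as in CUBE_DEFAULT_POLYS.
const POLYS:[[[u32;2];4];6]=[
    [[6,2],[5,1],[2,0],[1,3]],// right
    [[5,3],[4,2],[3,1],[2,0]],// top
    [[0,3],[1,2],[2,1],[3,0]],// back
    [[0,2],[3,1],[4,0],[7,3]],// left
    [[1,1],[0,0],[7,3],[6,2]],// bottom
    [[4,1],[5,0],[6,3],[7,2]],// front
];

fn dot(a:[i32;3],b:[i32;3])->i32{
    a[0]*b[0]+a[1]*b[1]+a[2]*b[2]
}

fn main(){
    for (face_id,face) in POLYS.iter().enumerate(){
        for &[pos_id,_tex_id] in face{
            // every vertex of the face satisfies normal . p == 1
            assert_eq!(dot(VERTICES[pos_id as usize],NORMALS[face_id]),1);
        }
    }
}
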
pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Mesh{
let mut generated_pos=Vec::new();
let mut generated_tex=Vec::new();
let mut generated_normal=Vec::new();
@@ -176,7 +211,7 @@ pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Me
let mut physics_group=IndexedPhysicsGroup::default();
let mut transforms=Vec::new();
//note that on a cube every vertex is guaranteed to be unique, so there's no need to hash them against existing vertices.
for (face_id,face_description) in face_descriptions.into_iter().enumerate(){
for (face_id,face_description) in face_descriptions.pairs(){
//assume that scanning short lists is faster than hashing.
let transform_index=if let Some(transform_index)=transforms.iter().position(|&transform|transform==face_description.transform){
transform_index
@@ -203,8 +238,8 @@ pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Me
//push vertices as they are needed
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(vec![
CUBE_DEFAULT_POLYS[face_id].map(|[pos_id,tex_id]|{
let pos=CUBE_DEFAULT_VERTICES[pos_id as usize];
CUBE_DEFAULT_POLYS[face_id].map(|tup|{
let pos=CUBE_DEFAULT_VERTICES[tup[0] as usize];
let pos_index=if let Some(pos_index)=generated_pos.iter().position(|&p|p==pos){
pos_index
}else{
@@ -216,7 +251,7 @@ pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Me
//always push vertex
let vertex=IndexedVertex{
pos:PositionId::new(pos_index),
tex:TextureCoordinateId::new(tex_id+4*transform_index),
tex:TextureCoordinateId::new(tup[1]+4*transform_index),
normal:NormalId::new(normal_index),
color:ColorId::new(color_index),
};
@@ -243,49 +278,42 @@ pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Me
}
}
//don't think too hard about the copy paste because this is all going into the map tool eventually...
pub fn unit_wedge(WedgeFaceDescription(face_descriptions):WedgeFaceDescription)->Mesh{
const WEDGE_DEFAULT_POLYS:[&[[u32;2]];5]=[
pub fn generate_partial_unit_wedge(face_descriptions:WedgeFaceDescription)->Mesh{
const WEDGE_DEFAULT_POLYS:[&[[u32;3]];5]=[
// right (1, 0, 0)
&[
[6,2],//[vertex,tex]
[2,0],
[1,3],
[6,2,0],//[vertex,tex,norm]
[2,0,0],
[1,3,0],
],
// FrontTop (0, 1, -1)
&[
[3,1],
[2,0],
[6,3],
[7,2],
[3,1,1],
[2,0,1],
[6,3,1],
[7,2,1],
],
// back (0, 0, 1)
&[
[0,3],
[1,2],
[2,1],
[3,0],
[0,3,2],
[1,2,2],
[2,1,2],
[3,0,2],
],
// left (-1, 0, 0)
&[
[0,2],
[3,1],
[7,3],
[0,2,3],
[3,1,3],
[7,3,3],
],
// bottom (0,-1, 0)
&[
[1,1],
[0,0],
[7,3],
[6,2],
[1,1,4],
[0,0,4],
[7,3,4],
[6,2,4],
],
];
const WEDGE_DEFAULT_NORMALS:[Planar64Vec3;5]=[
vec3::int( 1, 0, 0),//Wedge::Right
vec3::int( 0, 1,-1),//Wedge::TopFront
vec3::int( 0, 0, 1),//Wedge::Back
vec3::int(-1, 0, 0),//Wedge::Left
vec3::int( 0,-1, 0),//Wedge::Bottom
];
let mut generated_pos=Vec::new();
let mut generated_tex=Vec::new();
let mut generated_normal=Vec::new();
@@ -296,7 +324,7 @@ pub fn unit_wedge(WedgeFaceDescription(face_descriptions):WedgeFaceDescription)-
let mut physics_group=IndexedPhysicsGroup::default();
let mut transforms=Vec::new();
//note that on a cube every vertex is guaranteed to be unique, so there's no need to hash them against existing vertices.
for (face_id,face_description) in face_descriptions.into_iter().enumerate(){
for (face_id,face_description) in face_descriptions.pairs(){
//assume that scanning short lists is faster than hashing.
let transform_index=if let Some(transform_index)=transforms.iter().position(|&transform|transform==face_description.transform){
transform_index
@@ -323,8 +351,8 @@ pub fn unit_wedge(WedgeFaceDescription(face_descriptions):WedgeFaceDescription)-
//push vertices as they are needed
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(vec![
WEDGE_DEFAULT_POLYS[face_id].iter().map(|&[pos_id,tex_id]|{
let pos=CUBE_DEFAULT_VERTICES[pos_id as usize];
WEDGE_DEFAULT_POLYS[face_id].iter().map(|tup|{
let pos=CUBE_DEFAULT_VERTICES[tup[0] as usize];
let pos_index=if let Some(pos_index)=generated_pos.iter().position(|&p|p==pos){
pos_index
}else{
@@ -336,7 +364,7 @@ pub fn unit_wedge(WedgeFaceDescription(face_descriptions):WedgeFaceDescription)-
//always push vertex
let vertex=IndexedVertex{
pos:PositionId::new(pos_index),
tex:TextureCoordinateId::new(tex_id+4*transform_index),
tex:TextureCoordinateId::new(tup[1]+4*transform_index),
normal:NormalId::new(normal_index),
color:ColorId::new(color_index),
};
@@ -363,47 +391,40 @@ pub fn unit_wedge(WedgeFaceDescription(face_descriptions):WedgeFaceDescription)-
}
}
pub fn unit_cornerwedge(CornerWedgeFaceDescription(face_descriptions):CornerWedgeFaceDescription)->Mesh{
const CORNERWEDGE_DEFAULT_POLYS:[&[[u32;2]];5]=[
pub fn generate_partial_unit_cornerwedge(face_descriptions:CornerWedgeFaceDescription)->Mesh{
const CORNERWEDGE_DEFAULT_POLYS:[&[[u32;3]];5]=[
// right (1, 0, 0)
&[
[6,2],//[vertex,tex]
[5,1],
[1,3],
[6,2,0],//[vertex,tex,norm]
[5,1,0],
[1,3,0],
],
// BackTop (0, 1, 1)
&[
[5,3],
[0,1],
[1,0],
[5,3,1],
[0,1,1],
[1,0,1],
],
// LeftTop (-1, 1, 0)
&[
[5,3],
[7,2],
[0,1],
[5,3,2],
[7,2,2],
[0,1,2],
],
// bottom (0,-1, 0)
&[
[1,1],
[0,0],
[7,3],
[6,2],
[1,1,3],
[0,0,3],
[7,3,3],
[6,2,3],
],
// front (0, 0,-1)
&[
[5,0],
[6,3],
[7,2],
[5,0,4],
[6,3,4],
[7,2,4],
],
];
const CORNERWEDGE_DEFAULT_NORMALS:[Planar64Vec3;5]=[
vec3::int( 1, 0, 0),//CornerWedge::Right
vec3::int( 0, 1, 1),//CornerWedge::BackTop
vec3::int(-1, 1, 0),//CornerWedge::LeftTop
vec3::int( 0,-1, 0),//CornerWedge::Bottom
vec3::int( 0, 0,-1),//CornerWedge::Front
];
let mut generated_pos=Vec::new();
let mut generated_tex=Vec::new();
let mut generated_normal=Vec::new();
@@ -414,7 +435,7 @@ pub fn unit_cornerwedge(CornerWedgeFaceDescription(face_descriptions):CornerWedg
let mut physics_group=IndexedPhysicsGroup::default();
let mut transforms=Vec::new();
//note that on a cube every vertex is guaranteed to be unique, so there's no need to hash them against existing vertices.
for (face_id,face_description) in face_descriptions.into_iter().enumerate(){
for (face_id,face_description) in face_descriptions.pairs(){
//assume that scanning short lists is faster than hashing.
let transform_index=if let Some(transform_index)=transforms.iter().position(|&transform|transform==face_description.transform){
transform_index
@@ -441,8 +462,8 @@ pub fn unit_cornerwedge(CornerWedgeFaceDescription(face_descriptions):CornerWedg
//push vertices as they are needed
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(vec![
CORNERWEDGE_DEFAULT_POLYS[face_id].iter().map(|&[pos_id,tex_id]|{
let pos=CUBE_DEFAULT_VERTICES[pos_id as usize];
CORNERWEDGE_DEFAULT_POLYS[face_id].iter().map(|tup|{
let pos=CUBE_DEFAULT_VERTICES[tup[0] as usize];
let pos_index=if let Some(pos_index)=generated_pos.iter().position(|&p|p==pos){
pos_index
}else{
@@ -454,7 +475,7 @@ pub fn unit_cornerwedge(CornerWedgeFaceDescription(face_descriptions):CornerWedg
//always push vertex
let vertex=IndexedVertex{
pos:PositionId::new(pos_index),
tex:TextureCoordinateId::new(tex_id+4*transform_index),
tex:TextureCoordinateId::new(tup[1]+4*transform_index),
normal:NormalId::new(normal_index),
color:ColorId::new(color_index),
};
@@ -480,133 +501,3 @@ pub fn unit_cornerwedge(CornerWedgeFaceDescription(face_descriptions):CornerWedg
physics_groups:vec![physics_group],
}
}
// TODO: fix face texture orientation
pub fn unit_cylinder(face_descriptions:CubeFaceDescription)->Mesh{
// cylinder is oriented about the x axis
// roblox cylinders use projected grid coordinates
/// how many grid coordinates to use (positive and negative)
const GON:i32=3;
/// grid perimeter
const POINTS:[[i32;2];4*2*GON as usize]=const{
let mut points=[[0;2];{4*2*GON as usize}];
let mut i=-GON;
while i<GON{
points[(i+GON) as usize]=[i,GON];
points[(i+GON+1*2*GON) as usize]=[GON,-i];
points[(i+GON+2*2*GON) as usize]=[-i,-GON];
points[(i+GON+3*2*GON) as usize]=[-GON,i];
i+=1;
}
points
};
let mut mb=MeshBuilder::new();
let mut polygon_groups=Vec::with_capacity(CubeFaceDescription::FACES);
let mut graphics_groups=Vec::with_capacity(CubeFaceDescription::FACES);
let mut physics_group=IndexedPhysicsGroup{groups:Vec::with_capacity(CubeFaceDescription::FACES)};
let CubeFaceDescription([right,top,back,left,bottom,front])=face_descriptions;
macro_rules! end_face{
($face_description:expr,$end:expr,$iter:expr)=>{
let normal=mb.acquire_normal_id($end);
let color=mb.acquire_color_id($face_description.color);
// single polygon for physics
let polygon:Vec<_>=$iter.map(|[x,y]|{
let tex=mb.acquire_tex_id(
$face_description.transform.transform_point2(
(glam::vec2(-x as f32,y as f32).normalize()+1.0)/2.0
)
);
let pos=mb.acquire_pos_id($end+vec3::int(0,-x,y).with_length(Planar64::ONE).divide().wrap_1());
mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color})
}).collect();
// fanned polygons for graphics
let pos=mb.acquire_pos_id($end);
let tex=mb.acquire_tex_id($face_description.transform.transform_point2(glam::Vec2::ONE/2.0));
let center=mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color});
let polygon_list=(0..POINTS.len()).map(|i|
vec![center,polygon[i],polygon[(i+1)%POINTS.len()]]
).collect();
// end face graphics
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(polygon_list)));
graphics_groups.push(IndexedGraphicsGroup{
render:$face_description.render,
groups:vec![group_id],
});
// end face physics
let polygon_list=vec![polygon];
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(polygon_list)));
physics_group.groups.push(group_id);
}
}
macro_rules! tex{
($face_description:expr,$tex:expr)=>{{
let [x,y]=$tex;
$face_description.transform.transform_point2(
glam::vec2((x+GON) as f32,(y+GON) as f32)/(2*GON) as f32
)
}};
}
macro_rules! barrel_face{
($face_description:expr,$loop:ident,$lo_dir:expr,$hi_dir:expr,$tex_0:expr,$tex_1:expr,$tex_2:expr,$tex_3:expr)=>{
let mut polygon_list=Vec::with_capacity(CubeFaceDescription::FACES);
for $loop in -GON..GON{
// lo Z
let lz_dir=$lo_dir.with_length(Planar64::ONE).divide().wrap_1();
// hi Z
let hz_dir=$hi_dir.with_length(Planar64::ONE).divide().wrap_1();
// pos
let lx_lz_pos=mb.acquire_pos_id(vec3::NEG_X+lz_dir);
let lx_hz_pos=mb.acquire_pos_id(vec3::NEG_X+hz_dir);
let hx_hz_pos=mb.acquire_pos_id(vec3::X+hz_dir);
let hx_lz_pos=mb.acquire_pos_id(vec3::X+lz_dir);
// tex
let lx_lz_tex=mb.acquire_tex_id(tex!($face_description,$tex_0));
let lx_hz_tex=mb.acquire_tex_id(tex!($face_description,$tex_1));
let hx_hz_tex=mb.acquire_tex_id(tex!($face_description,$tex_2));
let hx_lz_tex=mb.acquire_tex_id(tex!($face_description,$tex_3));
// norm
let lz_norm=mb.acquire_normal_id(lz_dir);
let hz_norm=mb.acquire_normal_id(hz_dir);
// color
let color=mb.acquire_color_id($face_description.color);
polygon_list.push(vec![
mb.acquire_vertex_id(IndexedVertex{pos:lx_lz_pos,tex:lx_lz_tex,normal:lz_norm,color}),
mb.acquire_vertex_id(IndexedVertex{pos:lx_hz_pos,tex:lx_hz_tex,normal:hz_norm,color}),
mb.acquire_vertex_id(IndexedVertex{pos:hx_hz_pos,tex:hx_hz_tex,normal:hz_norm,color}),
mb.acquire_vertex_id(IndexedVertex{pos:hx_lz_pos,tex:hx_lz_tex,normal:lz_norm,color}),
]);
}
// push face
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(polygon_list)));
graphics_groups.push(IndexedGraphicsGroup{
render:$face_description.render,
groups:vec![group_id],
});
physics_group.groups.push(group_id);
};
}
end_face!(right, vec3::X,POINTS.into_iter());
barrel_face!(top, z,vec3::int(0,GON,z),vec3::int(0,GON,z+1), [GON,z],[GON,z+1],[-GON,z+1],[-GON,z]);
barrel_face!(back, y,vec3::int(0,y+1,GON),vec3::int(0,y,GON), [GON,y+1],[GON,y],[-GON,y],[-GON,y+1]);
end_face!(left, vec3::NEG_X,POINTS.into_iter().rev());
barrel_face!(bottom, z,vec3::int(0,-GON,z+1),vec3::int(0,-GON,z), [-GON,z+1],[-GON,z],[GON,z],[GON,z+1]);
barrel_face!(front, y,vec3::int(0,y,-GON),vec3::int(0,y+1,-GON), [-GON,y],[-GON,y+1],[GON,y+1],[GON,y]);
let physics_groups=vec![physics_group];
mb.build(polygon_groups,graphics_groups,physics_groups)
}
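As an aside on the projected-grid comment above: the POINTS const is simply one lap around the square perimeter of the grid, which the end and barrel faces then normalize onto the circle. A standalone sketch, not part of the diff, with an illustrative perimeter() helper and the GON = 1 case worked out:

// illustrative sketch (not part of the diff): the projected-grid perimeter,
// built the same way as the POINTS const in unit_cylinder
fn perimeter(gon: i32) -> Vec<[i32; 2]> {
    let mut points = vec![[0i32; 2]; (4 * 2 * gon) as usize];
    let mut i = -gon;
    while i < gon {
        points[(i + gon) as usize] = [i, gon];
        points[(i + gon + 2 * gon) as usize] = [gon, -i];
        points[(i + gon + 4 * gon) as usize] = [-i, -gon];
        points[(i + gon + 6 * gon) as usize] = [-gon, i];
        i += 1;
    }
    points
}
fn main() {
    // GON = 1 visits the 8 perimeter grid points of the square [-1,1] x [-1,1] exactly once
    assert_eq!(
        perimeter(1),
        vec![[-1, 1], [0, 1], [1, 1], [1, 0], [1, -1], [0, -1], [-1, -1], [-1, 0]]
    );
}
// end of sketch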

File diff suppressed because it is too large

View File

@@ -1,12 +1,8 @@
use crate::loader::MeshWithSize;
use crate::rbx::RobloxPartDescription;
use crate::primitives::{CUBE_DEFAULT_VERTICES,CUBE_DEFAULT_POLYS,FaceDescription};
use rbx_mesh::mesh_data::{VertexId as MeshDataVertexId,NormalId as MeshDataNormalId,NormalId2 as MeshDataNormalId2,NormalId5 as MeshDataNormalId5};
use rbx_mesh::physics_data::VertexId as PhysicsDataVertexId;
use strafesnet_common::model::{self,IndexedVertex,MeshBuilder,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId,VertexId};
use rbx_mesh::mesh_data::NormalId2 as MeshDataNormalId2;
use strafesnet_common::model::{self,IndexedVertex,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId};
use strafesnet_common::integer::vec3;
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Block,
@@ -24,7 +20,7 @@ impl std::fmt::Display for Error{
// wacky state machine to make sure all vertices in a face agree upon what NormalId to use.
// Roblox duplicates this information per vertex when it should only exist per-face.
enum MeshDataNormalStatus{
Agree(MeshDataNormalId),
Agree(MeshDataNormalId2),
Conflicting,
}
struct MeshDataNormalChecker{
@@ -34,7 +30,7 @@ impl MeshDataNormalChecker{
fn new()->Self{
Self{status:None}
}
fn check(&mut self,normal:MeshDataNormalId){
fn check(&mut self,normal:MeshDataNormalId2){
self.status=match self.status.take(){
None=>Some(MeshDataNormalStatus::Agree(normal)),
Some(MeshDataNormalStatus::Agree(old_normal))=>{
@@ -47,7 +43,7 @@ impl MeshDataNormalChecker{
Some(MeshDataNormalStatus::Conflicting)=>Some(MeshDataNormalStatus::Conflicting),
};
}
fn into_agreed_normal(self)->Option<MeshDataNormalId>{
fn into_agreed_normal(self)->Option<MeshDataNormalId2>{
self.status.and_then(|status|match status{
MeshDataNormalStatus::Agree(normal)=>Some(normal),
MeshDataNormalStatus::Conflicting=>None,
@@ -55,127 +51,18 @@ impl MeshDataNormalChecker{
}
}
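A minimal standalone sketch of that agreement state machine, using a plain u32 in place of the rbx_mesh normal-id newtypes (the names here are illustrative, not from the diff):

// illustrative sketch (not part of the diff)
enum Status {
    Agree(u32),
    Conflicting,
}
#[derive(Default)]
struct NormalChecker {
    status: Option<Status>,
}
impl NormalChecker {
    fn check(&mut self, normal: u32) {
        self.status = match self.status.take() {
            // the first vertex fixes the candidate normal id
            None => Some(Status::Agree(normal)),
            // later vertices must repeat it, otherwise the face is ambiguous
            Some(Status::Agree(old)) if old == normal => Some(Status::Agree(old)),
            Some(_) => Some(Status::Conflicting),
        };
    }
    fn into_agreed_normal(self) -> Option<u32> {
        match self.status? {
            Status::Agree(n) => Some(n),
            Status::Conflicting => None,
        }
    }
}
fn main() {
    let mut ok = NormalChecker::default();
    for n in [3, 3, 3] { ok.check(n); }
    assert_eq!(ok.into_agreed_normal(), Some(3));
    let mut bad = NormalChecker::default();
    for n in [3, 2, 3] { bad.check(n); }
    assert_eq!(bad.into_agreed_normal(), None);
}
// end of sketch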
fn build_mesh2(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::Mesh2,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for vertex in &mesh.vertices{
let p=vertex.pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for [MeshDataVertexId(vertex_id0),MeshDataVertexId(vertex_id1),MeshDataVertexId(vertex_id2)] in mesh.faces{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
let MeshDataNormalId2(normal_id)=vertex.normal_id;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(vertex.pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array())?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
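The autoscale at the top of build_mesh2 above (and repeated in build_mesh5 below) only divides by the bounding-box extent and does not recenter; a standalone sketch with made-up positions:

// illustrative sketch (not part of the diff): bounding-box autoscale
use glam::Vec3;
fn main() {
    let vertices = [
        Vec3::new(-2.0, 0.0, 1.0),
        Vec3::new( 2.0, 4.0, 3.0),
        Vec3::new( 0.0, 2.0, 2.0),
    ];
    let mut pos_min = Vec3::MAX;
    let mut pos_max = Vec3::MIN;
    for &p in &vertices {
        pos_min = pos_min.min(p);
        pos_max = pos_max.max(p);
    }
    let graphics_size = pos_max - pos_min; // (4, 4, 2)
    // each position ends up expressed in bounding-box units
    assert_eq!(vertices[1] / graphics_size, Vec3::new(0.5, 1.0, 1.5));
}
// end of sketch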
fn build_mesh5(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::CSGMDL5,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for &pos in &mesh.positions{
let p=pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for face in mesh.faces.indices.chunks_exact(3){
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|&vertex_id|{
let vertex_index=vertex_id as usize;
let &pos=mesh.positions.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &MeshDataNormalId5(normal_id)=mesh.normal_ids.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &norm=mesh.normals.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &tex=mesh.tex.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &color=mesh.colors.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array()).map_err(Error::Planar64Vec3)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(norm).map_err(Error::Planar64Vec3)?);
let tex_coord=glam::Vec2::from_array(tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>()?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
const NORMAL_FACES:usize=6;
impl std::error::Error for Error{}
pub fn convert(
roblox_physics_data:&[u8],
roblox_mesh_data:&[u8],
size:glam::Vec3,
RobloxPartDescription(part_texture_description):RobloxPartDescription,
)->Result<MeshWithSize,Error>{
let mut polygon_groups_normal_id:[_;NORMAL_FACES]=[vec![],vec![],vec![],vec![],vec![],vec![]];
part_texture_description:crate::rbx::RobloxPartDescription,
)->Result<model::Mesh,Error>{
const NORMAL_FACES:usize=6;
let mut polygon_groups_normal_id=vec![Vec::new();NORMAL_FACES];
// build graphics and physics meshes
let mut mb=MeshBuilder::new();
let mut mb=strafesnet_common::model::MeshBuilder::new();
// graphics
let graphics_groups=if !roblox_mesh_data.is_empty(){
// create per-face texture coordinate affine transforms
@@ -187,12 +74,46 @@ pub fn convert(
let mesh_data=rbx_mesh::read_mesh_data_versioned(
std::io::Cursor::new(roblox_mesh_data)
).map_err(Error::RobloxMeshData)?;
match mesh_data{
let graphics_mesh=match mesh_data{
rbx_mesh::mesh_data::MeshData::CSGK(_)=>return Err(Error::Block),
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V2(mesh_data2))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data2.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V4(mesh_data4))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V5(mesh_data4))=>build_mesh5(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL2(mesh_data2))=>mesh_data2.mesh,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL4(mesh_data4))=>mesh_data4.mesh,
};
for [vertex_id0,vertex_id1,vertex_id2] in graphics_mesh.faces{
let face=[
graphics_mesh.vertices.get(vertex_id0.0 as usize).ok_or(Error::MissingVertexId(vertex_id0.0))?,
graphics_mesh.vertices.get(vertex_id1.0 as usize).ok_or(Error::MissingVertexId(vertex_id1.0))?,
graphics_mesh.vertices.get(vertex_id2.0 as usize).ok_or(Error::MissingVertexId(vertex_id2.0))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
normal_agreement_checker.check(vertex.normal_id);
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[vertex.normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
(0..NORMAL_FACES).map(|polygon_group_id|{
model::IndexedGraphicsGroup{
render:cube_face_description[polygon_group_id].as_ref().map_or(RenderConfigId::new(0),|face_description|face_description.render),
@@ -204,11 +125,7 @@ pub fn convert(
};
//physics
let polygon_groups_normal_it=polygon_groups_normal_id.into_iter().map(|faces|
// graphics polygon groups (to be rendered)
Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
);
let polygon_groups:Vec<PolygonGroup>=if !roblox_physics_data.is_empty(){
let physics_convex_meshes=if !roblox_physics_data.is_empty(){
let physics_data=rbx_mesh::read_physics_data_versioned(
std::io::Cursor::new(roblox_physics_data)
).map_err(Error::RobloxPhysicsData)?;
@@ -217,56 +134,44 @@ pub fn convert(
// have not seen this format in practice
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Block)
=>return Err(Error::Block),
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V3(meshes))
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V5(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V6(meshes))
=>vec![meshes.mesh],
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V7(meshes))
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Meshes(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::PhysicsInfoMesh(pim))
=>vec![pim.mesh],
};
let physics_convex_meshes_it=physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// physics polygon groups (to do physics)
Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[PhysicsDataVertexId(vertex_id0),PhysicsDataVertexId(vertex_id1),PhysicsDataVertexId(vertex_id2)]|{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
].map(|v|glam::Vec3::from_slice(v)/size);
let vertex_norm=(face[1]-face[0])
.cross(face[2]-face[0]);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
face.into_iter().map(|vertex_pos|{
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect()
}).collect::<Result<_,_>>()?)))
});
polygon_groups_normal_it.chain(physics_convex_meshes_it).collect::<Result<_,_>>()?
physics_convex_meshes
}else{
// generate a unit cube as default physics
let pos_list=CUBE_DEFAULT_VERTICES.map(|pos|mb.acquire_pos_id(pos>>1));
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
let normal=mb.acquire_normal_id(vec3::zero());
let color=mb.acquire_color_id(glam::Vec4::ONE);
let polygon_group=PolygonGroup::PolygonList(PolygonList::new(CUBE_DEFAULT_POLYS.map(|poly|poly.map(|[pos_id,_]|
mb.acquire_vertex_id(IndexedVertex{pos:pos_list[pos_id as usize],tex,normal,color})
).to_vec()).to_vec()));
polygon_groups_normal_it.chain([Ok(polygon_group)]).collect::<Result<_,_>>()?
Vec::new()
};
let polygon_groups:Vec<PolygonGroup>=polygon_groups_normal_id.into_iter().map(|faces|
// graphics polygon groups (to be rendered)
Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
).chain(physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// physics polygon groups (to do physics)
Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[vertex_id0,vertex_id1,vertex_id2]|{
let face=[
mesh.vertices.get(vertex_id0.0 as usize).ok_or(Error::MissingVertexId(vertex_id0.0))?,
mesh.vertices.get(vertex_id1.0 as usize).ok_or(Error::MissingVertexId(vertex_id1.0))?,
mesh.vertices.get(vertex_id2.0 as usize).ok_or(Error::MissingVertexId(vertex_id2.0))?,
].map(|v|glam::Vec3::from_slice(v)/size);
let vertex_norm=(face[1]-face[0])
.cross(face[2]-face[0]);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
face.into_iter().map(|vertex_pos|{
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect()
}).collect::<Result<_,_>>()?)))
})).collect::<Result<_,_>>()?;
let physics_groups=(NORMAL_FACES..polygon_groups.len()).map(|id|model::IndexedPhysicsGroup{
groups:vec![PolygonGroupId::new(id as u32)]
}).collect();
let mesh=mb.build(
Ok(mb.build(
polygon_groups,
graphics_groups,
physics_groups,
);
Ok(MeshWithSize{
mesh,
size:vec3::ONE,
})
))
}
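For the physics polygon groups above, each triangle's normal comes straight from the cross product of two edge vectors; a standalone numeric check (the coordinates are made up for the example):

// illustrative sketch (not part of the diff): face normal via edge cross product
use glam::Vec3;
fn main() {
    // a triangle lying in the y = 0 plane, wound so the normal comes out along +Y
    let face = [
        Vec3::new(0.0, 0.0, 0.0),
        Vec3::new(0.0, 0.0, 1.0),
        Vec3::new(1.0, 0.0, 0.0),
    ];
    let vertex_norm = (face[1] - face[0]).cross(face[2] - face[0]);
    // (0,0,1) x (1,0,0) = (0*0 - 1*0, 1*1 - 0*0, 0*0 - 0*1) = (0, 1, 0)
    assert_eq!(vertex_norm, Vec3::new(0.0, 1.0, 0.0));
}
// end of sketch

The result is not normalized here; its magnitude is twice the triangle's area, and its direction flips if the winding is reversed.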

View File

@@ -9,6 +9,3 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
url = "2.5.4"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "roblox_emulator"
version = "0.5.2"
version = "0.4.7"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -13,12 +13,9 @@ run-service=[]
[dependencies]
glam = "0.30.0"
mlua = { version = "0.11.3", features = ["luau"] }
phf = { version = "0.13.1", features = ["macros"] }
rbx_dom_weak = "4.1.0"
rbx_reflection = "6.1.0"
rbx_reflection_database = "2.0.2"
rbx_types = "3.1.0"
[lints]
workspace = true
mlua = { version = "0.10.1", features = ["luau"] }
phf = { version = "0.11.2", features = ["macros"] }
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
rbx_reflection = { version = "4.7.0", registry = "strafesnet" }
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
rbx_types = { version = "1.10.0", registry = "strafesnet" }

View File

@@ -1,121 +1,93 @@
use crate::util::static_ustr;
use rbx_dom_weak::{types::Ref,InstanceBuilder,WeakDom};
#[derive(Debug)]
pub enum ServicesError{
WorkspaceNotFound,
}
impl std::fmt::Display for ServicesError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ServicesError{}
pub struct Services{
pub(crate) game:Ref,
pub(crate) workspace:Ref,
pub fn class_is_a(class:&str,superclass:&str)->bool{
class==superclass
||rbx_reflection_database::get().classes.get(class)
.is_some_and(|descriptor|
descriptor.superclass.as_ref().is_some_and(|class_super|
class_is_a(class_super,superclass)
)
)
}
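The recursive class_is_a above bottoms out once a class has no recorded superclass; a self-contained sketch of the same walk over a toy class table (the table stands in for rbx_reflection_database, and the class names are only examples):

// illustrative sketch (not part of the diff): recursive superclass walk
use std::collections::HashMap;
fn class_is_a(classes: &HashMap<&str, Option<&str>>, class: &str, superclass: &str) -> bool {
    class == superclass
        || classes.get(class).is_some_and(|parent|
            parent.is_some_and(|p| class_is_a(classes, p, superclass))
        )
}
fn main() {
    let classes: HashMap<&str, Option<&str>> = HashMap::from([
        ("Instance", None),
        ("BasePart", Some("Instance")),
        ("Part", Some("BasePart")),
        ("Script", Some("Instance")),
    ]);
    assert!(class_is_a(&classes, "Part", "Part"));      // a class is-a itself
    assert!(class_is_a(&classes, "Part", "Instance"));  // walks the whole chain
    assert!(!class_is_a(&classes, "Script", "BasePart"));
}
// end of sketch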
impl Services{
fn find_services(dom:&WeakDom)->Result<Services,ServicesError>{
Ok(Services{
workspace:*dom.root().children().iter().find(|&&r|
dom.get_by_ref(r).is_some_and(|instance|instance.class=="Workspace")
).ok_or(ServicesError::WorkspaceNotFound)?,
game:dom.root_ref(),
})
}
}
pub type LuaAppData=&'static mut WeakDom;
#[repr(transparent)]
pub struct Context{
pub(crate)dom:WeakDom,
pub(crate)services:Services,
}
impl Context{
pub fn from_place(dom:WeakDom)->Result<Context,ServicesError>{
let services=Services::find_services(&dom)?;
Ok(Self{dom,services})
pub const fn new(dom:WeakDom)->Self{
Self{dom}
}
pub fn script_singleton(source:String)->(Context,crate::runner::instance::Instance){
pub fn script_singleton(source:String)->(Context,crate::runner::instance::Instance,Services){
let script=InstanceBuilder::new("Script")
.with_property("Source",rbx_types::Variant::String(source));
let script_ref=script.referent();
let dom=WeakDom::new(
let mut context=Self::new(WeakDom::new(
InstanceBuilder::new("DataModel")
.with_child(script)
);
let context=Self::from_model(dom);
(context,crate::runner::instance::Instance::new_unchecked(script_ref))
));
let services=context.convert_into_place();
(context,crate::runner::instance::Instance::new(script_ref),services)
}
pub fn from_ref(dom:&WeakDom)->&Context{
unsafe{&*(dom as *const WeakDom as *const Context)}
}
pub fn from_mut(dom:&mut WeakDom)->&mut Context{
unsafe{&mut *(dom as *mut WeakDom as *mut Context)}
}
/// Creates an iterator over all items of a particular class.
pub fn superclass_iter<'a>(&'a self,superclass:&'a str)->impl Iterator<Item=Ref>+'a{
let db=rbx_reflection_database::get().unwrap();
let Some(superclass)=db.classes.get(superclass)else{
panic!("Invalid class");
};
self.dom.descendants().filter_map(|instance|{
let class=db.classes.get(instance.class.as_str())?;
db.has_superclass(class,superclass).then(||instance.referent())
})
self.dom.descendants().filter(|&instance|
class_is_a(instance.class.as_ref(),superclass)
).map(|instance|instance.referent())
}
pub fn scripts(&self)->Vec<crate::runner::instance::Instance>{
self.superclass_iter("Script")
.filter_map(|script_ref|{
let script=self.dom.get_by_ref(script_ref)?;
if let None|Some(rbx_dom_weak::types::Variant::Bool(false))=script.properties.get(&static_ustr("Disabled")){
return Some(crate::runner::instance::Instance::new_unchecked(script_ref));
}
None
})
.collect()
self.superclass_iter("LuaSourceContainer").map(crate::runner::instance::Instance::new).collect()
}
pub fn from_model(mut dom:WeakDom)->Context{
pub fn find_services(&self)->Option<Services>{
Some(Services{
workspace:*self.dom.root().children().iter().find(|&&r|
self.dom.get_by_ref(r).is_some_and(|instance|instance.class=="Workspace")
)?,
game:self.dom.root_ref(),
})
}
pub fn convert_into_place(&mut self)->Services{
//snapshot root instances
let children=dom.root().children().to_owned();
let children=self.dom.root().children().to_owned();
//insert services
let game=dom.root_ref();
let terrain_bldr=InstanceBuilder::new("Terrain")
.with_properties([
("CFrame",rbx_dom_weak::types::Variant::CFrame(rbx_dom_weak::types::CFrame::new(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0),rbx_dom_weak::types::Matrix3::identity()))),
("Size",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(1.0,1.0,1.0))),
("Velocity",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0))),
("Transparency",rbx_dom_weak::types::Variant::Float32(0.0)),
("Color",rbx_dom_weak::types::Variant::Color3uint8(rbx_dom_weak::types::Color3uint8::new(255,255,255))),
("CanCollide",rbx_dom_weak::types::Variant::Bool(true)),
]);
let workspace=dom.insert(game,
let game=self.dom.root_ref();
let terrain_bldr=InstanceBuilder::new("Terrain");
let workspace=self.dom.insert(game,
InstanceBuilder::new("Workspace")
//Set Workspace.Terrain property equal to Terrain
.with_property("Terrain",terrain_bldr.referent())
.with_child(terrain_bldr)
);
{
//Lowercase and upper case workspace property!
let game=self.dom.root_mut();
game.properties.insert("workspace".to_owned(),rbx_types::Variant::Ref(workspace));
game.properties.insert("Workspace".to_owned(),rbx_types::Variant::Ref(workspace));
}
self.dom.insert(game,InstanceBuilder::new("Lighting"));
//transfer original root instances into workspace
for instance in children{
dom.transfer_within(instance,workspace);
self.dom.transfer_within(instance,workspace);
}
{
//Lowercase and upper case workspace property!
let game=dom.root_mut();
// TODO: DELETE THIS!
game.properties.insert(static_ustr("workspace"),rbx_types::Variant::Ref(workspace));
game.properties.insert(static_ustr("Workspace"),rbx_types::Variant::Ref(workspace));
Services{
game,
workspace,
}
dom.insert(game,InstanceBuilder::new("Lighting"));
let services=Services{game,workspace};
Self{dom,services}
}
}
impl AsRef<WeakDom> for Context{
fn as_ref(&self)->&WeakDom{
&self.dom
}
pub struct Services{
pub game:Ref,
pub workspace:Ref,
}

View File

@@ -1,4 +1,3 @@
mod util;
pub mod runner;
pub mod context;
#[cfg(feature="run-service")]
@@ -6,7 +5,3 @@ pub(crate) mod scheduler;
#[cfg(test)]
mod tests;
pub mod mlua{
pub use mlua::{Result,Error};
}

View File

@@ -1,59 +0,0 @@
use super::color3::Color3;
#[derive(Clone,Copy)]
pub struct BrickColor(rbx_types::BrickColor);
impl BrickColor{
pub fn from_name(name:&str)->Option<Self>{
Some(BrickColor(rbx_types::BrickColor::from_name(name)?))
}
pub fn from_number(number:u16)->Option<Self>{
Some(BrickColor(rbx_types::BrickColor::from_number(number)?))
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
table.raw_set("new",
lua.create_function(|_,r:mlua::Value|
match r{
mlua::Value::String(name)=>Ok(BrickColor::from_name(&*name.to_str()?)),
mlua::Value::Integer(number)=>Ok(BrickColor::from_number(number as u16)),
_=>Err(mlua::Error::runtime("Unsupported arguments"))
}
)?
)?;
macro_rules! brickcolor_constructor{
($fname:expr,$internal:ident)=>{
table.raw_set($fname,
lua.create_function(|_,_:()|
Ok(BrickColor(rbx_types::BrickColor::$internal))
)?
)?;
};
}
brickcolor_constructor!("White",White);
brickcolor_constructor!("Gray",MediumStoneGrey);
brickcolor_constructor!("DarkGray",DarkStoneGrey);
brickcolor_constructor!("Black",Black);
brickcolor_constructor!("Red",BrightRed);
brickcolor_constructor!("Yellow",BrightYellow);
brickcolor_constructor!("Green",DarkGreen);
brickcolor_constructor!("Blue",BrightBlue);
globals.set("BrickColor",table)?;
Ok(())
}
impl mlua::UserData for BrickColor{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Color",|_,BrickColor(this)|{
let rbx_types::Color3uint8{r,g,b}=this.to_color3uint8();
Ok(Color3::from_rgb(r,g,b))
});
}
}
type_from_lua_userdata!(BrickColor);

View File

@@ -1,10 +1,7 @@
use mlua::FromLua;
use super::number::Number;
use super::vector3::Vector3;
#[derive(Clone,Copy)]
pub struct CFrame(glam::Affine3A);
pub struct CFrame(pub(crate)glam::Affine3A);
impl CFrame{
pub fn new(
@@ -37,14 +34,14 @@ fn vec3_from_glam(v:rbx_types::Vector3)->glam::Vec3A{
glam::vec3a(v.x,v.y,v.z)
}
impl From<CFrame> for rbx_types::CFrame{
fn from(CFrame(cf):CFrame)->rbx_types::CFrame{
impl Into<rbx_types::CFrame> for CFrame{
fn into(self)->rbx_types::CFrame{
rbx_types::CFrame::new(
vec3_to_glam(cf.translation),
vec3_to_glam(self.0.translation),
rbx_types::Matrix3::new(
vec3_to_glam(cf.matrix3.x_axis),
vec3_to_glam(cf.matrix3.y_axis),
vec3_to_glam(cf.matrix3.z_axis),
vec3_to_glam(self.0.matrix3.x_axis),
vec3_to_glam(self.0.matrix3.y_axis),
vec3_to_glam(self.0.matrix3.z_axis),
)
)
}
@@ -63,25 +60,16 @@ impl From<rbx_types::CFrame> for CFrame{
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
let cframe_table=lua.create_table()?;
//CFrame.new
table.raw_set("new",
lua.create_function(|lua,tuple:(
mlua::Value,mlua::Value,Option<Number>,
Option<Number>,Option<Number>,Option<Number>,
Option<Number>,Option<Number>,Option<Number>,
Option<Number>,Option<Number>,Option<Number>,
cframe_table.raw_set("new",
lua.create_function(|_,tuple:(
mlua::Value,mlua::Value,Option<f32>,
Option<f32>,Option<f32>,Option<f32>,
Option<f32>,Option<f32>,Option<f32>,
Option<f32>,Option<f32>,Option<f32>,
)|match tuple{
//CFrame.new()
(
mlua::Value::Nil,mlua::Value::Nil,None,
None,None,None,
None,None,None,
None,None,None,
)=>{
Ok(CFrame(glam::Affine3A::IDENTITY))
},
//CFrame.new(pos)
(
mlua::Value::UserData(pos),mlua::Value::Nil,None,
@@ -89,8 +77,8 @@ pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
None,None,None,
None,None,None,
)=>{
let Vector3(pos):&Vector3=&*pos.borrow()?;
Ok(CFrame::point(pos.x,pos.y,pos.z))
let pos:Vector3=pos.take()?;
Ok(CFrame::point(pos.0.x,pos.0.y,pos.0.z))
},
//TODO: CFrame.new(pos,look)
(
@@ -99,99 +87,85 @@ pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
None,None,None,
None,None,None,
)=>{
let _pos:&Vector3=&*pos.borrow()?;
let _look:&Vector3=&*look.borrow()?;
let _pos:Vector3=pos.take()?;
let _look:Vector3=look.take()?;
Err(mlua::Error::runtime("Not yet implemented"))
},
//CFrame.new(x,y,z)
(
x,y,Some(z),
mlua::Value::Number(x),mlua::Value::Number(y),Some(z),
None,None,None,
None,None,None,
None,None,None,
)=>Ok(CFrame::point(Number::from_lua(x,lua)?.into(),Number::from_lua(y,lua)?.into(),z.into())),
)=>Ok(CFrame::point(x as f32,y as f32,z)),
//CFrame.new(x,y,z,xx,yx,zx,xy,yy,zy,xz,yz,zz)
(
x,y,Some(z),
mlua::Value::Number(x),mlua::Value::Number(y),Some(z),
Some(xx),Some(yx),Some(zx),
Some(xy),Some(yy),Some(zy),
Some(xz),Some(yz),Some(zz),
)=>Ok(CFrame::new(Number::from_lua(x,lua)?.into(),Number::from_lua(y,lua)?.into(),z.into(),
xx.into(),yx.into(),zx.into(),
xy.into(),yy.into(),zy.into(),
xz.into(),yz.into(),zz.into(),
)=>Ok(CFrame::new(x as f32,y as f32,z,
xx,yx,zx,
xy,yy,zy,
xz,yz,zz,
)),
_=>Err(mlua::Error::runtime("Invalid arguments"))
})?
)?;
//CFrame.Angles
let from_euler_angles=lua.create_function(|_,(x,y,z):(Number,Number,Number)|
Ok(CFrame::angles(x.into(),y.into(),z.into()))
cframe_table.raw_set("Angles",
lua.create_function(|_,(x,y,z):(f32,f32,f32)|
Ok(CFrame::angles(x,y,z))
)?
)?;
table.raw_set("Angles",from_euler_angles.clone())?;
table.raw_set("fromEulerAnglesXYZ",from_euler_angles.clone())?;
table.raw_set("FromEulerAnglesXYZ",from_euler_angles)?;
globals.set("CFrame",table)?;
globals.set("CFrame",cframe_table)?;
Ok(())
}
impl mlua::UserData for CFrame{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("p",|_,CFrame(this)|Ok(Vector3(this.translation)));
fields.add_field_method_get("x",|_,CFrame(this)|Ok(this.translation.x));
fields.add_field_method_get("X",|_,CFrame(this)|Ok(this.translation.x));
fields.add_field_method_get("y",|_,CFrame(this)|Ok(this.translation.y));
fields.add_field_method_get("Y",|_,CFrame(this)|Ok(this.translation.y));
fields.add_field_method_get("z",|_,CFrame(this)|Ok(this.translation.z));
fields.add_field_method_get("Z",|_,CFrame(this)|Ok(this.translation.z));
fields.add_field_method_get("rightVector",|_,CFrame(this)|Ok(Vector3(this.matrix3.x_axis)));
fields.add_field_method_get("RightVector",|_,CFrame(this)|Ok(Vector3(this.matrix3.x_axis)));
fields.add_field_method_get("upVector",|_,CFrame(this)|Ok(Vector3(this.matrix3.y_axis)));
fields.add_field_method_get("UpVector",|_,CFrame(this)|Ok(Vector3(this.matrix3.y_axis)));
fields.add_field_method_get("lookVector",|_,CFrame(this)|Ok(Vector3(-this.matrix3.z_axis)));
fields.add_field_method_get("LookVector",|_,CFrame(this)|Ok(Vector3(-this.matrix3.z_axis)));
fields.add_field_method_get("XVector",|_,CFrame(this)|Ok(Vector3(this.matrix3.row(0))));
fields.add_field_method_get("YVector",|_,CFrame(this)|Ok(Vector3(this.matrix3.row(1))));
fields.add_field_method_get("ZVector",|_,CFrame(this)|Ok(Vector3(this.matrix3.row(2))));
//CFrame.p
fields.add_field_method_get("p",|_,this|Ok(Vector3(this.0.translation)));
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
methods.add_method("components",|_,CFrame(this),()|Ok((
this.translation.x,
this.translation.y,
this.translation.z,
this.matrix3.x_axis.x,
this.matrix3.y_axis.x,
this.matrix3.z_axis.x,
this.matrix3.x_axis.y,
this.matrix3.y_axis.y,
this.matrix3.z_axis.y,
this.matrix3.x_axis.z,
this.matrix3.y_axis.z,
this.matrix3.z_axis.z,
methods.add_method("components",|_,this,()|Ok((
this.0.translation.x,
this.0.translation.y,
this.0.translation.z,
this.0.matrix3.x_axis.x,
this.0.matrix3.y_axis.x,
this.0.matrix3.z_axis.x,
this.0.matrix3.x_axis.y,
this.0.matrix3.y_axis.y,
this.0.matrix3.z_axis.y,
this.0.matrix3.x_axis.z,
this.0.matrix3.y_axis.z,
this.0.matrix3.z_axis.z,
)));
methods.add_method("VectorToWorldSpace",|_,CFrame(this),Vector3(v):Vector3|
Ok(Vector3(this.transform_vector3a(v)))
methods.add_method("VectorToWorldSpace",|_,this,v:Vector3|
Ok(Vector3(this.0.transform_vector3a(v.0)))
);
methods.add_meta_function(mlua::MetaMethod::Mul,|_,(CFrame(this),CFrame(val)):(Self,Self)|Ok(Self(this*val)));
methods.add_meta_function(mlua::MetaMethod::ToString,|_,CFrame(this):Self|
//methods.add_meta_method(mlua::MetaMethod::Mul,|_,this,val:&Vector3|Ok(Vector3(this.0.matrix3*val.0+this.0.translation)));
methods.add_meta_function(mlua::MetaMethod::Mul,|_,(this,val):(Self,Self)|Ok(Self(this.0*val.0)));
methods.add_meta_function(mlua::MetaMethod::ToString,|_,this:Self|
Ok(format!("CFrame.new({},{},{},{},{},{},{},{},{},{},{},{})",
this.translation.x,
this.translation.y,
this.translation.z,
this.matrix3.x_axis.x,
this.matrix3.y_axis.x,
this.matrix3.z_axis.x,
this.matrix3.x_axis.y,
this.matrix3.y_axis.y,
this.matrix3.z_axis.y,
this.matrix3.x_axis.z,
this.matrix3.y_axis.z,
this.matrix3.z_axis.z,
this.0.translation.x,
this.0.translation.y,
this.0.translation.z,
this.0.matrix3.x_axis.x,
this.0.matrix3.y_axis.x,
this.0.matrix3.z_axis.x,
this.0.matrix3.x_axis.y,
this.0.matrix3.y_axis.y,
this.0.matrix3.z_axis.y,
this.0.matrix3.x_axis.z,
this.0.matrix3.y_axis.z,
this.0.matrix3.z_axis.z,
))
);
}

View File

@@ -1,5 +1,3 @@
use super::number::Number;
#[derive(Clone,Copy)]
pub struct Color3{
r:f32,
@@ -10,37 +8,28 @@ impl Color3{
pub const fn new(r:f32,g:f32,b:f32)->Self{
Self{r,g,b}
}
pub const fn from_rgb(r:u8,g:u8,b:u8)->Self{
Color3::new(r as f32/255.0,g as f32/255.0,b as f32/255.0)
}
}
impl From<rbx_types::Color3> for Color3{
fn from(value:rbx_types::Color3)->Color3{
Color3::new(value.r,value.g,value.b)
}
}
impl From<Color3> for rbx_types::Color3{
fn from(value:Color3)->rbx_types::Color3{
rbx_types::Color3::new(value.r,value.g,value.b)
impl Into<rbx_types::Color3> for Color3{
fn into(self)->rbx_types::Color3{
rbx_types::Color3::new(self.r,self.g,self.b)
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
let color3_table=lua.create_table()?;
table.raw_set("new",
lua.create_function(|_,(r,g,b):(Number,Number,Number)|
Ok(Color3::new(r.into(),g.into(),b.into()))
color3_table.raw_set("new",
lua.create_function(|_,(r,g,b):(f32,f32,f32)|
Ok(Color3::new(r,g,b))
)?
)?;
color3_table.raw_set("fromRGB",
lua.create_function(|_,(r,g,b):(u8,u8,u8)|
Ok(Color3::new(r as f32/255.0,g as f32/255.0,b as f32/255.0))
)?
)?;
let from_rgb=lua.create_function(|_,(r,g,b):(u8,u8,u8)|
Ok(Color3::from_rgb(r,g,b))
)?;
table.raw_set("fromRGB",from_rgb.clone())?;
table.raw_set("FromRGB",from_rgb)?;
globals.set("Color3",table)?;
globals.set("Color3",color3_table)?;
Ok(())
}

View File

@@ -1,29 +1,31 @@
#[derive(Clone)]
pub struct ColorSequence(rbx_types::ColorSequence);
#[derive(Clone,Copy)]
pub struct ColorSequence{}
impl ColorSequence{
pub const fn new(keypoints:Vec<rbx_types::ColorSequenceKeypoint>)->Self{
Self(rbx_types::ColorSequence{keypoints})
pub const fn new()->Self{
Self{}
}
}
impl From<ColorSequence> for rbx_types::ColorSequence{
fn from(ColorSequence(value):ColorSequence)->rbx_types::ColorSequence{
value
impl Into<rbx_types::ColorSequence> for ColorSequence{
fn into(self)->rbx_types::ColorSequence{
rbx_types::ColorSequence{
keypoints:Vec::new()
}
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
let number_sequence_table=lua.create_table()?;
table.raw_set("new",
number_sequence_table.raw_set("new",
lua.create_function(|_,_:mlua::MultiValue|
Ok(ColorSequence::new(Vec::new()))
Ok(ColorSequence::new())
)?
)?;
globals.set("ColorSequence",table)?;
globals.set("ColorSequence",number_sequence_table)?;
Ok(())
}
impl mlua::UserData for ColorSequence{}
type_from_lua_userdata_clone!(ColorSequence);
type_from_lua_userdata!(ColorSequence);

View File

@@ -1,136 +1,63 @@
#[derive(Clone,Copy)]
pub struct EnumItem<'a>{
name:Option<&'a str>,
value:u32,
}
impl<'a> EnumItem<'a>{
fn known_name((name,&value):(&'a std::borrow::Cow<'a,str>,&u32))->Self{
Self{name:Some(name.as_ref()),value}
}
}
impl From<rbx_types::Enum> for EnumItem<'_>{
fn from(e:rbx_types::Enum)->Self{
EnumItem{
name:None,
value:e.to_u32(),
}
}
}
impl From<EnumItem<'_>> for rbx_types::Enum{
fn from(e:EnumItem)->rbx_types::Enum{
rbx_types::Enum::from_u32(e.value)
}
}
impl PartialEq for EnumItem<'_>{
fn eq(&self,other:&EnumItem<'_>)->bool{
self.value==other.value&&{
// if both names are known, they must match, otherwise whatever
match (self.name,other.name){
(Some(lhs),Some(rhs))=>lhs==rhs,
_=>true,
}
}
}
}
use mlua::IntoLua;
#[derive(Clone,Copy)]
pub struct Enums;
impl Enums{
pub fn get(&self,index:&str)->Option<EnumItems<'static>>{
let db=rbx_reflection_database::get().unwrap();
db.enums.get(index).map(|ed|EnumItems{ed})
}
}
pub struct Enum(u32);
#[derive(Clone,Copy)]
pub struct EnumItems<'a>{
pub struct EnumItems;
#[derive(Clone,Copy)]
pub struct EnumItem<'a>{
ed:&'a rbx_reflection::EnumDescriptor<'a>,
}
impl<'a> EnumItems<'a>{
pub fn from_value(&self,value:u32)->Option<EnumItem<'a>>{
self.ed.items.iter().find(|&(_,&v)|v==value).map(EnumItem::known_name)
}
pub fn from_name(&self,name:&str)->Option<EnumItem<'a>>{
self.ed.items.get_key_value(name).map(EnumItem::known_name)
}
pub fn from_enum(&self,enum_item:EnumItem)->Option<EnumItem<'a>>{
match enum_item.name{
Some(s)=>{
let got=self.from_name(s)?;
(got.value==enum_item.value).then_some(got)
},
None=>self.from_value(enum_item.value)
}
impl Into<rbx_types::Enum> for Enum{
fn into(self)->rbx_types::Enum{
rbx_types::Enum::from_u32(self.0)
}
}
pub enum CoerceEnum<'a>{
Integer(i64),
String(mlua::String),
Enum(EnumItem<'a>),
}
impl CoerceEnum<'_>{
pub fn coerce_to<'a>(self,enum_items:EnumItems<'a>)->mlua::Result<EnumItem<'a>>{
match self{
CoerceEnum::Integer(int)=>enum_items.from_value(int as u32),
CoerceEnum::String(s)=>enum_items.from_name(&*s.to_str()?),
CoerceEnum::Enum(enum_item)=>enum_items.from_enum(enum_item),
}.ok_or_else(||mlua::Error::runtime(format!("Bad {} EnumItem",enum_items.ed.name)))
}
}
impl mlua::FromLua for CoerceEnum<'_>{
fn from_lua(value:mlua::Value,_lua:&mlua::Lua)->Result<Self,mlua::Error>{
match value{
mlua::Value::Integer(int)=>Ok(CoerceEnum::Integer(int)),
mlua::Value::String(s)=>Ok(CoerceEnum::String(s)),
mlua::Value::UserData(ud)=>Ok(CoerceEnum::Enum(*ud.borrow()?)),
other=>Err(mlua::Error::runtime(format!("Expected {} got {:?}",stringify!(Enum),other))),
}
impl<'a> EnumItem<'a>{
const fn new(ed:&'a rbx_reflection::EnumDescriptor)->Self{
Self{ed}
}
}
pub fn set_globals(_lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
globals.set("Enum",Enums)
globals.set("Enum",EnumItems)
}
impl mlua::UserData for EnumItems<'static>{
fn add_fields<F:mlua::UserDataFields<Self>>(_fields:&mut F){
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
methods.add_method("FromName",|_,this:&EnumItems,name:mlua::String|Ok(this.from_name(&*name.to_str()?)));
methods.add_method("FromValue",|_,this:&EnumItems,value:u32|Ok(this.from_value(value)));
methods.add_method("GetEnumItems",|_,this:&EnumItems,()|->mlua::Result<Vec<EnumItem>>{
Ok(this.ed.items.iter().map(EnumItem::known_name).collect())
});
methods.add_meta_function(mlua::MetaMethod::Index,|_,(this,val):(EnumItems,mlua::String)|{
let index=&*val.to_str()?;
Ok(this.ed.items.get_key_value(index).map(EnumItem::known_name))
});
}
}
type_from_lua_userdata_lua_lifetime!(EnumItems);
impl mlua::UserData for Enums{
fn add_fields<F:mlua::UserDataFields<Self>>(_fields:&mut F){
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
methods.add_meta_function(mlua::MetaMethod::Index,|_,(enums,val):(Self,mlua::String)|{
Ok(enums.get(&*val.to_str()?))
});
}
}
type_from_lua_userdata!(Enums);
impl mlua::UserData for EnumItem<'_>{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Name",|_,this|Ok(this.name));
fields.add_field_method_get("Value",|_,this|Ok(this.value));
fn add_fields<F:mlua::UserDataFields<Self>>(_fields:&mut F){
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
methods.add_meta_function(mlua::MetaMethod::Eq,|_,(lhs,rhs):(EnumItem<'_>,EnumItem<'_>)|{
Ok(lhs==rhs)
methods.add_meta_function(mlua::MetaMethod::Index,|lua,(this,val):(EnumItem<'_>,mlua::String)|{
match this.ed.items.get(&*val.to_str()?){
Some(&id)=>Enum(id).into_lua(lua),
None=>mlua::Value::Nil.into_lua(lua),
}
});
}
}
type_from_lua_userdata_lua_lifetime!(EnumItem);
impl mlua::UserData for EnumItems{
fn add_fields<F:mlua::UserDataFields<Self>>(_fields:&mut F){
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
methods.add_meta_function(mlua::MetaMethod::Index,|lua,(_,val):(Self,mlua::String)|{
let db=rbx_reflection_database::get();
match db.enums.get(&*val.to_str()?){
Some(ed)=>EnumItem::new(ed).into_lua(lua),
None=>mlua::Value::Nil.into_lua(lua),
}
});
}
}
type_from_lua_userdata!(EnumItems);
impl mlua::UserData for Enum{
fn add_fields<F:mlua::UserDataFields<Self>>(_fields:&mut F){
}
fn add_methods<M:mlua::UserDataMethods<Self>>(_methods:&mut M){
}
}
type_from_lua_userdata!(Enum);

View File

@@ -2,48 +2,49 @@ use std::collections::{hash_map::Entry,HashMap};
use mlua::{FromLua,FromLuaMulti,IntoLua,IntoLuaMulti};
use rbx_types::Ref;
use rbx_dom_weak::{Ustr,InstanceBuilder,WeakDom};
use rbx_dom_weak::{InstanceBuilder,WeakDom};
use crate::util::static_ustr;
use crate::runner::number::Number;
use crate::runner::vector3::Vector3;
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
//class functions store
lua.set_app_data(ClassMethodsStore::default());
lua.set_app_data(InstanceValueStore::default());
let table=lua.create_table()?;
let instance_table=lua.create_table()?;
//Instance.new
table.raw_set("new",
instance_table.raw_set("new",
lua.create_function(|lua,(class_name,parent):(mlua::String,Option<Instance>)|{
let class_name_str=&*class_name.to_str()?;
let parent_ref=parent.map_or(Ref::none(),|instance|instance.referent);
let parent=parent.ok_or_else(||mlua::Error::runtime("Nil Parent not yet supported"))?;
dom_mut(lua,|dom|{
Ok(Instance::new_unchecked(dom.insert(parent_ref,InstanceBuilder::new(class_name_str))))
//TODO: Nil instances
Ok(Instance::new(dom.insert(parent.referent,InstanceBuilder::new(class_name_str))))
})
})?
)?;
globals.set("Instance",table)?;
globals.set("Instance",instance_table)?;
Ok(())
}
// LMAO look at this function!
pub fn dom_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut WeakDom)->mlua::Result<T>)->mlua::Result<T>{
let mut dom=lua.app_data_mut::<crate::context::LuaAppData>().ok_or_else(||mlua::Error::runtime("DataModel missing"))?;
let mut dom=lua.app_data_mut::<&'static mut WeakDom>().ok_or_else(||mlua::Error::runtime("DataModel missing"))?;
f(*dom)
}
pub fn class_is_a(class:&str,superclass:&str)->bool{
let db=rbx_reflection_database::get().unwrap();
let (Some(class),Some(superclass))=(db.classes.get(class),db.classes.get(superclass))else{
return false;
};
db.has_superclass(class,superclass)
fn coerce_float32(value:&mlua::Value)->Option<f32>{
match value{
&mlua::Value::Integer(i)=>Some(i as f32),
&mlua::Value::Number(f)=>Some(f as f32),
_=>None,
}
}
fn get_full_name(dom:&WeakDom,instance:&rbx_dom_weak::Instance)->String{
fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->String{
let mut full_name=instance.name.clone();
let mut pref=instance.parent();
while let Some(parent)=dom.get_by_ref(pref){
@@ -57,7 +58,7 @@ fn get_full_name(dom:&WeakDom,instance:&rbx_dom_weak::Instance)->String{
pub fn get_name_source(lua:&mlua::Lua,script:Instance)->Result<(String,String),mlua::Error>{
dom_mut(lua,|dom|{
let instance=script.get(dom)?;
let source=match instance.properties.get(&static_ustr("Source")){
let source=match instance.properties.get("Source"){
Some(rbx_dom_weak::types::Variant::String(s))=>s.clone(),
_=>Err(mlua::Error::external("Missing script.Source"))?,
};
@@ -65,46 +66,28 @@ pub fn get_name_source(lua:&mlua::Lua,script:Instance)->Result<(String,String),m
})
}
pub fn find_first_child<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.name==name)
}
pub fn find_first_descendant<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
dom.descendants_of(instance.referent()).find(|&inst|inst.name==name)
}
pub fn find_first_child_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.class==class)
}
pub fn find_first_descendant_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
dom.descendants_of(instance.referent()).find(|&inst|inst.class==class)
}
pub fn find_first_child_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
let db=rbx_reflection_database::get().unwrap();
let superclass_descriptor=db.classes.get(superclass)?;
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|{
db.classes.get(inst.class.as_str()).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
})
}
pub fn find_first_descendant_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
let db=rbx_reflection_database::get().unwrap();
let superclass_descriptor=db.classes.get(superclass)?;
dom.descendants_of(instance.referent()).find(|inst|{
db.classes.get(inst.class.as_str()).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
})
}
#[derive(Clone,Copy)]
pub struct Instance{
referent:Ref,
}
impl Instance{
pub const fn new_unchecked(referent:Ref)->Self{
pub const fn new(referent:Ref)->Self{
Self{referent}
}
pub fn new(referent:Ref)->Option<Self>{
referent.is_some().then_some(Self{referent})
}
pub fn get<'a>(&self,dom:&'a WeakDom)->mlua::Result<&'a rbx_dom_weak::Instance>{
dom.get_by_ref(self.referent).ok_or_else(||mlua::Error::runtime("Instance missing"))
}
@@ -114,25 +97,40 @@ impl Instance{
}
type_from_lua_userdata!(Instance);
//TODO: update rbx_reflection and use dom.superclasses_iter
pub struct SuperClassIter<'a> {
database: &'a rbx_reflection::ReflectionDatabase<'a>,
descriptor: Option<&'a rbx_reflection::ClassDescriptor<'a>>,
}
impl<'a> SuperClassIter<'a> {
fn next_descriptor(&self) -> Option<&'a rbx_reflection::ClassDescriptor<'a>> {
let superclass = self.descriptor?.superclass.as_ref()?;
self.database.classes.get(superclass)
}
}
impl<'a> Iterator for SuperClassIter<'a> {
type Item = &'a rbx_reflection::ClassDescriptor<'a>;
fn next(&mut self) -> Option<Self::Item> {
let next_descriptor = self.next_descriptor();
std::mem::replace(&mut self.descriptor, next_descriptor)
}
}
impl mlua::UserData for Instance{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fn get_parent(lua:&mlua::Lua,this:&Instance)->mlua::Result<Option<Instance>>{
fields.add_field_method_get("Parent",|lua,this|{
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
Ok(Instance::new(instance.parent()))
})
}
fields.add_field_method_get("parent",get_parent);
fields.add_field_method_get("Parent",get_parent);
fn set_parent(lua:&mlua::Lua,this:&mut Instance,new_parent:Option<Instance>)->mlua::Result<()>{
let parent_ref=new_parent.map_or(Ref::none(),|instance|instance.referent);
});
fields.add_field_method_set("Parent",|lua,this,val:Option<Instance>|{
let parent=val.ok_or_else(||mlua::Error::runtime("Nil Parent not yet supported"))?;
dom_mut(lua,|dom|{
dom.transfer_within(this.referent,parent_ref);
dom.transfer_within(this.referent,parent.referent);
Ok(())
})
}
fields.add_field_method_set("parent",set_parent);
fields.add_field_method_set("Parent",set_parent);
});
fields.add_field_method_get("Name",|lua,this|{
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
@@ -150,38 +148,22 @@ impl mlua::UserData for Instance{
fields.add_field_method_get("ClassName",|lua,this|{
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
Ok(instance.class.to_owned())
Ok(instance.class.clone())
})
});
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
fn clone(lua:&mlua::Lua,this:&Instance,_:())->mlua::Result<Instance>{
dom_mut(lua,|dom|{
let instance_ref=dom.clone_within(this.referent);
Ok(Instance::new_unchecked(instance_ref))
})
}
methods.add_method("clone",clone);
methods.add_method("Clone",clone);
fn get_children(lua:&mlua::Lua,this:&Instance,_:())->mlua::Result<Vec<Instance>>{
methods.add_method("GetChildren",|lua,this,_:()|
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
let children:Vec<_>=instance
.children()
.iter()
.copied()
.map(Instance::new_unchecked)
.map(Instance::new)
.collect();
Ok(children)
})
}
methods.add_method("children",get_children);
methods.add_method("GetChildren",get_children);
methods.add_method("GetFullName",|lua,this,()|
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
Ok(get_full_name(dom,instance))
})
);
fn ffc(lua:&mlua::Lua,this:&Instance,(name,search_descendants):(mlua::String,Option<bool>))->mlua::Result<Option<Instance>>{
let name_str=&*name.to_str()?;
@@ -193,7 +175,7 @@ impl mlua::UserData for Instance{
false=>find_first_child(dom,instance,name_str),
}
.map(|instance|
Instance::new_unchecked(instance.referent())
Instance::new(instance.referent())
)
)
})
@@ -203,29 +185,13 @@ impl mlua::UserData for Instance{
methods.add_method("FindFirstChildOfClass",|lua,this,(class,search_descendants):(mlua::String,Option<bool>)|{
let class_str=&*class.to_str()?;
dom_mut(lua,|dom|{
let inst=this.get(dom)?;
Ok(
match search_descendants.unwrap_or(false){
true=>find_first_descendant_of_class(dom,inst,class_str),
false=>find_first_child_of_class(dom,inst,class_str),
true=>find_first_descendant_of_class(dom,this.get(dom)?,class_str),
false=>find_first_child_of_class(dom,this.get(dom)?,class_str),
}
.map(|instance|
Instance::new_unchecked(instance.referent())
)
)
})
});
methods.add_method("FindFirstChildWhichIsA",|lua,this,(class,search_descendants):(mlua::String,Option<bool>)|{
let class_str=&*class.to_str()?;
dom_mut(lua,|dom|{
let inst=this.get(dom)?;
Ok(
match search_descendants.unwrap_or(false){
true=>find_first_descendant_which_is_a(dom,inst,class_str),
false=>find_first_child_which_is_a(dom,inst,class_str),
}
.map(|instance|
Instance::new_unchecked(instance.referent())
Instance::new(instance.referent())
)
)
})
@@ -235,42 +201,24 @@ impl mlua::UserData for Instance{
let children:Vec<_>=dom
.descendants_of(this.referent)
.map(|instance|
Instance::new_unchecked(instance.referent())
Instance::new(instance.referent())
)
.collect();
Ok(children)
})
);
methods.add_method("IsAncestorOf",|lua,this,descendant:Instance|
dom_mut(lua,|dom|{
let instance=descendant.get(dom)?;
Ok(std::iter::successors(Some(instance),|inst|dom.get_by_ref(inst.parent())).any(|inst|inst.referent()==this.referent))
})
);
methods.add_method("IsDescendantOf",|lua,this,ancestor:Instance|
methods.add_method("IsA",|lua,this,classname:mlua::String|
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
Ok(std::iter::successors(Some(instance),|inst|dom.get_by_ref(inst.parent())).any(|inst|inst.referent()==ancestor.referent))
Ok(crate::context::class_is_a(instance.class.as_str(),&*classname.to_str()?))
})
);
fn is_a(lua:&mlua::Lua,this:&Instance,classname:mlua::String)->mlua::Result<bool>{
methods.add_method("Destroy",|lua,this,()|
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
Ok(class_is_a(instance.class.as_str(),&*classname.to_str()?))
})
}
methods.add_method("isA",is_a);
methods.add_method("IsA",is_a);
fn destroy(lua:&mlua::Lua,this:&Instance,_:())->mlua::Result<()>{
dom_mut(lua,|dom|{
dom.transfer_within(this.referent,Ref::none());
dom.destroy(this.referent);
Ok(())
})
}
methods.add_method("remove",destroy);
methods.add_method("Remove",destroy);
methods.add_method("destroy",destroy);
methods.add_method("Destroy",destroy);
);
methods.add_meta_function(mlua::MetaMethod::ToString,|lua,this:Instance|{
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
@@ -282,40 +230,41 @@ impl mlua::UserData for Instance{
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
//println!("__index t={} i={index:?}",instance.name);
let db=rbx_reflection_database::get().unwrap();
let db=rbx_reflection_database::get();
let class=db.classes.get(instance.class.as_str()).ok_or_else(||mlua::Error::runtime("Class missing"))?;
// Find existing property
// Interestingly, ustr can know ahead of time if
// a property does not exist in any runtime instance
match Ustr::from_existing(index_str)
.and_then(|index_ustr|
instance.properties.get(&index_ustr).cloned()
)
//Find existing property
match instance.properties.get(index_str)
.cloned()
//Find default value
.or_else(||db.find_default_property(class,index_str).cloned())
//Find virtual property
.or_else(||db.superclasses_iter(class).find_map(|class|
find_virtual_property(&instance.properties,class,index_str)
))
.or_else(||{
SuperClassIter{
database:db,
descriptor:Some(class),
}
.find_map(|class|
find_virtual_property(&instance.properties,class,index_str)
)
})
{
Some(rbx_types::Variant::Bool(val))=>return val.into_lua(lua),
Some(rbx_types::Variant::Int32(val))=>return val.into_lua(lua),
Some(rbx_types::Variant::Int64(val))=>return val.into_lua(lua),
Some(rbx_types::Variant::Float32(val))=>return val.into_lua(lua),
Some(rbx_types::Variant::Float64(val))=>return val.into_lua(lua),
Some(rbx_types::Variant::String(val))=>return val.into_lua(lua),
Some(rbx_types::Variant::Ref(val))=>return Instance::new_unchecked(val).into_lua(lua),
Some(rbx_types::Variant::Enum(e))=>return crate::runner::r#enum::EnumItem::from(e).into_lua(lua),
Some(rbx_types::Variant::Color3(c))=>return crate::runner::color3::Color3::from(c).into_lua(lua),
Some(rbx_types::Variant::CFrame(cf))=>return crate::runner::cframe::CFrame::from(cf).into_lua(lua),
Some(rbx_types::Variant::Vector2(v))=>return crate::runner::vector2::Vector2::from(v).into_lua(lua),
Some(rbx_types::Variant::Vector3(v))=>return crate::runner::vector3::Vector3::from(v).into_lua(lua),
Some(rbx_types::Variant::Ref(val))=>return Instance::new(val).into_lua(lua),
Some(rbx_types::Variant::CFrame(cf))=>return Into::<crate::runner::cframe::CFrame>::into(cf).into_lua(lua),
Some(rbx_types::Variant::Vector3(v))=>return Into::<crate::runner::vector3::Vector3>::into(v).into_lua(lua),
None=>(),
other=>return Err(mlua::Error::runtime(format!("Instance.__index Unsupported property type instance={} index={index_str} value={other:?}",instance.name))),
}
//find a function with a matching name
if let Some(function)=class_methods_store_mut(lua,|cf|{
db.superclasses_iter(class).find_map(|class|{
let mut iter=SuperClassIter{
database:db,
descriptor:Some(class),
};
iter.find_map(|class|{
let mut class_methods=cf.get_or_create_class_methods(&class.name)?;
class_methods.get_or_create_function(lua,index_str)
.transpose()
@@ -336,106 +285,69 @@ impl mlua::UserData for Instance{
}
//find a child with a matching name
find_first_child(dom,instance,index_str)
.map(|instance|Instance::new_unchecked(instance.referent()))
.map(|instance|Instance::new(instance.referent()))
.into_lua(lua)
})
});
methods.add_meta_function(mlua::MetaMethod::NewIndex,|lua,(this,index,value):(Instance,mlua::String,mlua::Value)|{
let index_str=&*index.to_str()?;
dom_mut(lua,|dom|{
let instance=this.get_mut(dom)?;
let db=rbx_reflection_database::get().unwrap();
//println!("__newindex t={} i={index:?} v={value:?}",instance.name);
let index_str=&*index.to_str()?;
let db=rbx_reflection_database::get();
let class=db.classes.get(instance.class.as_str()).ok_or_else(||mlua::Error::runtime("Class missing"))?;
let property=db.superclasses_iter(class).find_map(|cls|
cls.properties.get(index_str)
).ok_or_else(||
mlua::Error::runtime(format!("Property '{index_str}' missing on class '{}'",class.name))
)?;
let value=match &property.data_type{
let mut iter=SuperClassIter{
database:db,
descriptor:Some(class),
};
let property=iter.find_map(|cls|cls.properties.get(index_str)).ok_or_else(||mlua::Error::runtime(format!("Property '{index_str}' missing on class '{}'",class.name)))?;
match &property.data_type{
rbx_reflection::DataType::Value(rbx_types::VariantType::Vector3)=>{
let typed_value:Vector3=*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected Userdata"))?.borrow()?;
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::Vector3(typed_value.into()));
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Float32)=>{
let typed_value=Number::from_lua(value.clone(),lua)?.to_f32();
rbx_types::Variant::Float32(typed_value)
let typed_value:f32=coerce_float32(&value).ok_or_else(||mlua::Error::runtime("Expected f32"))?;
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::Float32(typed_value));
},
rbx_reflection::DataType::Enum(enum_name)=>{
let typed_value=match &value{
&mlua::Value::Integer(int)=>Ok(rbx_types::Enum::from_u32(int as u32)),
&mlua::Value::Number(num)=>Ok(rbx_types::Enum::from_u32(num as u32)),
mlua::Value::String(s)=>{
let e=db.enums.get(enum_name).ok_or_else(||mlua::Error::runtime("Database DataType Enum name does not exist"))?;
let e=db.enums.get(enum_name).ok_or_else(||mlua::Error::runtime("Database DataType Enum name does not exist"))?;
Ok(rbx_types::Enum::from_u32(*e.items.get(&*s.to_str()?).ok_or_else(||mlua::Error::runtime("Invalid enum item"))?))
},
mlua::Value::UserData(any_user_data)=>{
let e:crate::runner::r#enum::EnumItem=*any_user_data.borrow()?;
let e:crate::runner::r#enum::Enum=*any_user_data.borrow()?;
Ok(e.into())
},
_=>Err(mlua::Error::runtime("Expected Enum")),
}?;
rbx_types::Variant::Enum(typed_value)
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::Enum(typed_value));
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Color3)=>{
let typed_value:crate::runner::color3::Color3=*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected Color3"))?.borrow()?;
rbx_types::Variant::Color3(typed_value.into())
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::Color3(typed_value.into()));
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Bool)=>{
let typed_value=value.as_boolean().ok_or_else(||mlua::Error::runtime("Expected boolean"))?;
rbx_types::Variant::Bool(typed_value)
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Int32)=>{
let typed_value=value.as_i32().ok_or_else(||mlua::Error::runtime("Expected Int32"))?;
rbx_types::Variant::Int32(typed_value)
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::Bool(typed_value));
},
rbx_reflection::DataType::Value(rbx_types::VariantType::String)=>{
let typed_value=match &value{
mlua::Value::Integer(i)=>i.to_string(),
mlua::Value::Number(n)=>n.to_string(),
mlua::Value::String(s)=>s.to_str()?.to_owned(),
_=>return Err(mlua::Error::runtime("Expected string")),
};
rbx_types::Variant::String(typed_value)
},
rbx_reflection::DataType::Value(rbx_types::VariantType::UDim2)=>{
let typed_value:&crate::runner::udim2::UDim2=&*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected UDim2"))?.borrow()?;
rbx_types::Variant::UDim2(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::NumberRange)=>{
let typed_value:&crate::runner::number_range::NumberRange=&*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected NumberRange"))?.borrow()?;
rbx_types::Variant::NumberRange(typed_value.clone().into())
let typed_value=value.as_str().ok_or_else(||mlua::Error::runtime("Expected boolean"))?;
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::String(typed_value.to_owned()));
},
rbx_reflection::DataType::Value(rbx_types::VariantType::NumberSequence)=>{
let typed_value:&crate::runner::number_sequence::NumberSequence=&*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected NumberSequence"))?.borrow()?;
rbx_types::Variant::NumberSequence(typed_value.clone().into())
let typed_value:crate::runner::number_sequence::NumberSequence=*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected NumberSequence"))?.borrow()?;
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::NumberSequence(typed_value.into()));
},
rbx_reflection::DataType::Value(rbx_types::VariantType::ColorSequence)=>{
let typed_value:&crate::runner::color_sequence::ColorSequence=&*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected ColorSequence"))?.borrow()?;
rbx_types::Variant::ColorSequence(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Vector2)=>{
let typed_value:crate::runner::vector2::Vector2=*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected Vector2"))?.borrow()?;
rbx_types::Variant::Vector2(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Vector3)=>{
let typed_value:crate::runner::vector3::Vector3=*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected Vector3"))?.borrow()?;
rbx_types::Variant::Vector3(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::CFrame)=>{
let typed_value:crate::runner::cframe::CFrame=*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected CFrame"))?.borrow()?;
rbx_types::Variant::CFrame(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::ContentId)=>{
let typed_value=value.as_string().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_str()?.to_owned();
rbx_types::Variant::ContentId(typed_value.into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Ref)=>{
// why clone?
let typed_value=Option::<Instance>::from_lua(value.clone(),lua)?;
rbx_types::Variant::Ref(typed_value.map_or(Ref::none(),|instance|instance.referent))
let typed_value:crate::runner::color_sequence::ColorSequence=*value.as_userdata().ok_or_else(||mlua::Error::runtime("Expected ColorSequence"))?.borrow()?;
instance.properties.insert(index_str.to_owned(),rbx_types::Variant::ColorSequence(typed_value.into()));
},
other=>return Err(mlua::Error::runtime(format!("Unimplemented property type: {other:?}"))),
};
// the index is known to be a real property at this point
// allow creating a permanent ustr (memory leak)
let index_ustr=rbx_dom_weak::ustr(index_str);
instance.properties.insert(index_ustr,value);
}
Ok(())
})
});
@@ -460,53 +372,28 @@ type CFD=phf::Map<&'static str,// Class name
ClassFunctionPointer
>
>;
const GET_SERVICE:ClassFunctionPointer=cf!(|lua,_this,service:mlua::String|{
dom_mut(lua,|dom|{
//dom.root_ref()==this.referent ?
let service=&*service.to_str()?;
match service{
"Lighting"|"RunService"|"Players"|"Workspace"|"MaterialService"|"TweenService"=>{
let referent=find_first_child_of_class(dom,dom.root(),service)
.map(|instance|instance.referent())
.unwrap_or_else(||
dom.insert(dom.root_ref(),InstanceBuilder::new(service))
);
Ok(Instance::new_unchecked(referent))
},
other=>Err(mlua::Error::runtime(format!("Service '{other}' not supported"))),
}
})
});
const GET_PLAYERS:ClassFunctionPointer=cf!(|_lua,_this,()|->mlua::Result<_>{
Ok(Vec::<Instance>::new())
});
const NO_OP:ClassFunctionPointer=cf!(|_lua,_this,_:mlua::MultiValue|->mlua::Result<_>{Ok(())});
static CLASS_FUNCTION_DATABASE:CFD=phf::phf_map!{
"DataModel"=>phf::phf_map!{
"service"=>GET_SERVICE,
"GetService"=>GET_SERVICE,
"GetService"=>cf!(|lua,_this,service:mlua::String|{
dom_mut(lua,|dom|{
//dom.root_ref()==this.referent ?
let service=&*service.to_str()?;
match service{
"Lighting"|"RunService"=>{
let referent=find_first_child_of_class(dom,dom.root(),service)
.map(|instance|instance.referent())
.unwrap_or_else(||
dom.insert(dom.root_ref(),InstanceBuilder::new(service))
);
Ok(Instance::new(referent))
},
other=>Err::<Instance,_>(mlua::Error::runtime(format!("Service '{other}' not supported"))),
}
})
}),
},
"Terrain"=>phf::phf_map!{
"FillBall"=>cf!(|_lua,_,_:(crate::runner::vector3::Vector3,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,crate::runner::vector3::Vector3,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillCylinder"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Number,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"SetMaterialColor"=>cf!(|_lua,_,_:(crate::runner::r#enum::CoerceEnum,crate::runner::color3::Color3)|mlua::Result::Ok(())),
},
"Players"=>phf::phf_map!{
"players"=>GET_PLAYERS,
"GetPlayers"=>GET_PLAYERS,
},
"Sound"=>phf::phf_map!{
"Play"=>NO_OP,
},
"TweenService"=>phf::phf_map!{
"Create"=>cf!(|_lua,_,(instance,tween_info,goal):(Instance,crate::runner::tween_info::TweenInfo,mlua::Table)|->mlua::Result<_>{
Ok(crate::runner::tween::Tween::create(
instance,
tween_info,
goal,
))
}),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Vector3,crate::runner::r#enum::Enum)|mlua::Result::Ok(()))
},
};
@@ -522,7 +409,7 @@ struct ClassMethodsStore{
}
impl ClassMethodsStore{
/// return self.classes[class] or create the ClassMethods and then return it
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods<'_>>{
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods>{
// Use get_entry to get the &'static str keys of the database
// and use it as a key for the classes hashmap
CLASS_FUNCTION_DATABASE.get_entry(class)
@@ -593,16 +480,16 @@ static VIRTUAL_PROPERTY_DATABASE:VPD=phf::phf_map!{
};
fn find_virtual_property(
properties:&rbx_dom_weak::UstrMap<rbx_types::Variant>,
properties:&HashMap<String,rbx_types::Variant>,
class:&rbx_reflection::ClassDescriptor,
index:&str,
index:&str
)->Option<rbx_types::Variant>{
//Find virtual property
let class_virtual_properties=VIRTUAL_PROPERTY_DATABASE.get(&class.name)?;
let virtual_property=class_virtual_properties.get(index)?;
//Get source property
let variant=properties.get(&static_ustr(virtual_property.property))?;
let variant=properties.get(virtual_property.property)?;
//Transform Source property with provided function
(virtual_property.pointer)(variant)
@@ -617,30 +504,11 @@ type LUD=phf::Map<&'static str,// Class name
CreateUserData
>
>;
fn create_script_signal(lua:&mlua::Lua)->mlua::Result<mlua::AnyUserData>{
lua.create_any_userdata(crate::runner::script_signal::ScriptSignal::new())
}
static LAZY_USER_DATA:LUD=phf::phf_map!{
"RunService"=>phf::phf_map!{
"Stepped"=>create_script_signal,
"Heartbeat"=>create_script_signal,
"RenderStepped"=>create_script_signal,
},
"Players"=>phf::phf_map!{
"PlayerAdded"=>create_script_signal,
},
"BasePart"=>phf::phf_map!{
"Touched"=>create_script_signal,
"TouchEnded"=>create_script_signal,
},
"Instance"=>phf::phf_map!{
"ChildAdded"=>create_script_signal,
"ChildRemoved"=>create_script_signal,
"DescendantAdded"=>create_script_signal,
"DescendantRemoved"=>create_script_signal,
},
"ClickDetector"=>phf::phf_map!{
"MouseClick"=>create_script_signal,
"RenderStepped"=>|lua|{
lua.create_any_userdata(crate::runner::script_signal::ScriptSignal::new())
},
},
};
#[derive(Default)]
@@ -656,7 +524,7 @@ pub struct InstanceValues<'a>{
values:&'a mut HashMap<&'static str,mlua::AnyUserData>,
}
impl InstanceValueStore{
pub fn get_or_create_instance_values(&mut self,instance:&rbx_dom_weak::Instance)->Option<InstanceValues<'_>>{
pub fn get_or_create_instance_values(&mut self,instance:&rbx_dom_weak::Instance)->Option<InstanceValues>{
LAZY_USER_DATA.get(instance.class.as_str())
.map(|named_values|
InstanceValues{

View File

@@ -10,18 +10,6 @@ macro_rules! type_from_lua_userdata{
}
};
}
macro_rules! type_from_lua_userdata_clone{
($ty:ident)=>{
impl mlua::FromLua for $ty{
fn from_lua(value:mlua::Value,_lua:&mlua::Lua)->Result<Self,mlua::Error>{
match value{
mlua::Value::UserData(ud)=>Ok(ud.borrow::<Self>()?.clone()),
other=>Err(mlua::Error::runtime(format!("Expected {} got {:?}",stringify!($ty),other))),
}
}
}
};
}
macro_rules! type_from_lua_userdata_lua_lifetime{
($ty:ident)=>{
impl mlua::FromLua for $ty<'static>{

View File

@@ -3,19 +3,10 @@ mod macros;
mod runner;
mod r#enum;
mod task;
mod udim;
mod tween;
mod udim2;
mod color3;
mod cframe;
mod number;
mod vector2;
mod vector3;
mod brickcolor;
mod tween_info;
pub mod instance;
mod number_range;
mod script_signal;
mod color_sequence;
mod number_sequence;

View File

@@ -1,43 +0,0 @@
// the goal of this module is to provide an intermediate type
// that is guaranteed to be some kind of number, and provide
// methods to coerce it into various more specific types.
#[derive(Clone,Copy)]
pub enum Number{
Integer(i64),
Number(f64),
}
macro_rules! impl_ty{
($ident:ident,$ty:ty)=>{
impl Number{
#[inline]
pub fn $ident(self)->$ty{
match self{
Self::Integer(int)=>int as $ty,
Self::Number(num)=>num as $ty,
}
}
}
impl From<Number> for $ty{
fn from(value:Number)->$ty{
value.$ident()
}
}
};
}
impl_ty!(to_u32,u32);
impl_ty!(to_i32,i32);
impl_ty!(to_f32,f32);
impl_ty!(to_u64,u64);
impl_ty!(to_i64,i64);
impl_ty!(to_f64,f64);
impl mlua::FromLua for Number{
fn from_lua(value:mlua::Value,_lua:&mlua::Lua)->Result<Self,mlua::Error>{
match value{
mlua::Value::Integer(int)=>Ok(Number::Integer(int)),
mlua::Value::Number(num)=>Ok(Number::Number(num)),
other=>Err(mlua::Error::runtime(format!("Expected {} got {:?}",stringify!(Number),other))),
}
}
}
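A minimal illustrative sketch (not part of the diff) of how the Number coercion type in the hunk above is meant to be used from an mlua callback; the set_walk_speed name is hypothetical:

fn set_walk_speed(lua:&mlua::Lua,value:mlua::Value)->mlua::Result<f32>{
	use mlua::FromLua;
	// Accepts either a Lua integer or a Lua float and rejects anything else,
	// then narrows to f32 through the Number intermediate type.
	let n=Number::from_lua(value,lua)?;
	Ok(n.to_f32())
}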

View File

@@ -1,34 +0,0 @@
use super::number::Number;
#[derive(Clone)]
pub struct NumberRange(rbx_types::NumberRange);
impl NumberRange{
pub const fn new(min:f32,max:f32)->Self{
Self(rbx_types::NumberRange{min,max})
}
}
impl From<NumberRange> for rbx_types::NumberRange{
fn from(NumberRange(value):NumberRange)->rbx_types::NumberRange{
value
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
table.raw_set("new",
lua.create_function(|_,(min,max):(Number,Option<Number>)|{
Ok(match max{
Some(max)=>NumberRange::new(min.into(),max.into()),
None=>NumberRange::new(min.into(),min.into()),
})
})?
)?;
globals.set("NumberRange",table)?;
Ok(())
}
impl mlua::UserData for NumberRange{}
type_from_lua_userdata_clone!(NumberRange);

View File

@@ -1,36 +1,31 @@
#[derive(Clone)]
pub struct NumberSequence(rbx_types::NumberSequence);
#[derive(Clone,Copy)]
pub struct NumberSequence{}
impl NumberSequence{
pub const fn new(keypoints:Vec<rbx_types::NumberSequenceKeypoint>)->Self{
Self(rbx_types::NumberSequence{keypoints})
pub const fn new()->Self{
Self{}
}
}
impl From<NumberSequence> for rbx_types::NumberSequence{
fn from(NumberSequence(value):NumberSequence)->rbx_types::NumberSequence{
value
impl Into<rbx_types::NumberSequence> for NumberSequence{
fn into(self)->rbx_types::NumberSequence{
rbx_types::NumberSequence{
keypoints:Vec::new()
}
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
let number_sequence_table=lua.create_table()?;
table.raw_set("new",
number_sequence_table.raw_set("new",
lua.create_function(|_,_:mlua::MultiValue|
Ok(NumberSequence::new(Vec::new()))
Ok(NumberSequence::new())
)?
)?;
globals.set("NumberSequence",table)?;
globals.set("NumberSequence",number_sequence_table)?;
Ok(())
}
impl mlua::UserData for NumberSequence{}
impl mlua::FromLua for NumberSequence{
fn from_lua(value:mlua::Value,_lua:&mlua::Lua)->Result<Self,mlua::Error>{
match value{
mlua::Value::UserData(ud)=>Ok(ud.borrow::<Self>()?.clone()),
other=>Err(mlua::Error::runtime(format!("Expected {} got {:?}",stringify!(NumberSequence),other))),
}
}
}
type_from_lua_userdata!(NumberSequence);

View File

@@ -12,12 +12,14 @@ pub enum Error{
error:mlua::Error
},
RustLua(mlua::Error),
NoServices,
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
match self{
Self::Lua{source,error}=>write!(f,"lua error: source:\n{source}\n{error}"),
Self::RustLua(error)=>write!(f,"rust-side lua error: {error}"),
other=>write!(f,"{other:?}"),
}
}
}
@@ -29,30 +31,20 @@ fn init(lua:&mlua::Lua)->mlua::Result<()>{
//global environment
let globals=lua.globals();
super::task::set_globals(lua,&globals)?;
#[cfg(feature="run-service")]
crate::scheduler::set_globals(lua,&globals)?;
super::script_signal::set_globals(lua,&globals)?;
super::r#enum::set_globals(lua,&globals)?;
super::udim::set_globals(lua,&globals)?;
super::udim2::set_globals(lua,&globals)?;
super::color3::set_globals(lua,&globals)?;
super::brickcolor::set_globals(lua,&globals)?;
super::vector2::set_globals(lua,&globals)?;
super::vector3::set_globals(lua,&globals)?;
super::cframe::set_globals(lua,&globals)?;
super::instance::instance::set_globals(lua,&globals)?;
super::tween_info::set_globals(lua,&globals)?;
super::number_range::set_globals(lua,&globals)?;
super::number_sequence::set_globals(lua,&globals)?;
super::color_sequence::set_globals(lua,&globals)?;
Ok(())
}
unsafe fn extend_lifetime_mut<'a,T>(src:&mut T)->&'a mut T{
let ptr:*mut T=src;
unsafe{&mut*ptr}
}
impl Runner{
pub fn new()->Result<Self,Error>{
let runner=Self{
@@ -62,20 +54,22 @@ impl Runner{
Ok(runner)
}
pub fn runnable_context<'a>(self,context:&'a mut Context)->Result<Runnable<'a>,Error>{
let services=context.find_services().ok_or(Error::NoServices)?;
self.runnable_context_with_services(context,&services)
}
pub fn runnable_context_with_services<'a>(self,context:&'a mut Context,services:&crate::context::Services)->Result<Runnable<'a>,Error>{
{
let globals=self.lua.globals();
globals.set("game",super::instance::Instance::new_unchecked(context.services.game)).map_err(Error::RustLua)?;
globals.set("workspace",super::instance::Instance::new_unchecked(context.services.workspace)).map_err(Error::RustLua)?;
globals.set("game",super::instance::Instance::new(services.game)).map_err(Error::RustLua)?;
globals.set("workspace",super::instance::Instance::new(services.workspace)).map_err(Error::RustLua)?;
}
// SAFETY: This is not a &'static mut WeakDom,
// but as long as Runnable<'a> holds the lifetime of &'a mut Context
// it is a valid unique reference.
self.lua.set_app_data::<crate::context::LuaAppData>(unsafe{extend_lifetime_mut(&mut context.dom)});
//this makes set_app_data shut up about the lifetime
self.lua.set_app_data::<&'static mut rbx_dom_weak::WeakDom>(unsafe{core::mem::transmute(&mut context.dom)});
#[cfg(feature="run-service")]
self.lua.set_app_data::<crate::scheduler::Scheduler>(crate::scheduler::Scheduler::default());
Ok(Runnable{
lua:self.lua,
_lifetime:std::marker::PhantomData
_lifetime:&std::marker::PhantomData
})
}
}
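An illustrative reduction of the pattern the SAFETY comment above relies on, using toy types rather than WeakDom/Context: the guard's PhantomData<&'a mut Dom> keeps the caller's mutable borrow alive until the guard is dropped, which is what makes stashing a lifetime-erased copy sound (stored in Lua app data in the diff; immediately discarded here).

struct Dom;
// Guard plays the role of Runnable<'a>: while it exists, `dom` stays mutably borrowed.
struct Guard<'a>(std::marker::PhantomData<&'a mut Dom>);
unsafe fn extend_lifetime_mut<'a,T>(src:&mut T)->&'a mut T{
	let ptr:*mut T=src;
	unsafe{&mut *ptr}
}
fn attach(dom:&mut Dom)->Guard<'_>{
	// SAFETY: the erased reference must not be used after the returned Guard is dropped.
	let _erased:&'static mut Dom=unsafe{extend_lifetime_mut(dom)};
	Guard(std::marker::PhantomData)
}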
@@ -83,11 +77,11 @@ impl Runner{
//Runnable is the same thing but has context set, which it holds the lifetime for.
pub struct Runnable<'a>{
lua:mlua::Lua,
_lifetime:std::marker::PhantomData<&'a ()>
_lifetime:&'a std::marker::PhantomData<()>
}
impl Runnable<'_>{
pub fn drop_context(self)->Runner{
self.lua.remove_app_data::<crate::context::LuaAppData>();
self.lua.remove_app_data::<&'static mut rbx_dom_weak::WeakDom>();
#[cfg(feature="run-service")]
self.lua.remove_app_data::<crate::scheduler::Scheduler>();
Runner{

View File

@@ -98,9 +98,12 @@ impl ScriptConnection{
impl mlua::UserData for ScriptSignal{
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
methods.add_method("connect",|_lua,this,f:mlua::Function|Ok(this.connect(f)));
methods.add_method("Connect",|_lua,this,f:mlua::Function|Ok(this.connect(f)));
methods.add_method("Once",|_lua,this,f:mlua::Function|Ok(this.once(f)));
methods.add_method("Connect",|_lua,this,f:mlua::Function|
Ok(this.connect(f))
);
methods.add_method("Once",|_lua,this,f:mlua::Function|
Ok(this.once(f))
);
// Fire is not allowed to be called from Lua
// methods.add_method("Fire",|_lua,this,args:mlua::MultiValue|
// Ok(this.fire(args))
@@ -117,7 +120,7 @@ impl mlua::FromLua for ScriptSignal{
}
impl mlua::UserData for ScriptConnection{
fn add_fields<F:UserDataFields<Self>>(fields:&mut F){
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Connected",|_,this|{
Ok(this.position().is_some())
});
@@ -161,7 +164,6 @@ pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
.call::<mlua::Function>(())?;
lua.register_userdata_type::<ScriptSignal>(|reg|{
reg.add_field("wait",wait.clone());
reg.add_field("Wait",wait);
mlua::UserData::register(reg);
})?;

View File

@@ -1,42 +0,0 @@
#[cfg(not(feature="run-service"))]
fn no_op(_lua:&mlua::Lua,_time:Option<super::number::Number>)->mlua::Result<f64>{
Ok(0.0)
}
fn tick(_lua:&mlua::Lua,_:())->mlua::Result<f64>{
Ok(0.0)
}
// This is used to avoid calling coroutine.yield from the rust side.
const LUA_WAIT:&str=
"local coroutine_yield=coroutine.yield
local schedule_thread=schedule_thread
return function(dt)
schedule_thread(dt)
return coroutine_yield()
end";
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let coroutine_table=globals.get::<mlua::Table>("coroutine")?;
#[cfg(feature="run-service")]
let schedule_thread=lua.create_function(crate::scheduler::schedule_thread)?;
#[cfg(not(feature="run-service"))]
let schedule_thread=lua.create_function(no_op)?;
//create wait function environment
let wait_env=lua.create_table()?;
wait_env.raw_set("coroutine",coroutine_table)?;
wait_env.raw_set("schedule_thread",schedule_thread)?;
//construct wait function from Lua code
let wait=lua.load(LUA_WAIT)
.set_name("wait")
.set_environment(wait_env)
.call::<mlua::Function>(())?;
globals.raw_set("wait",wait)?;
// TODO: move this somewhere it belongs
let tick=lua.create_function(tick)?;
globals.raw_set("tick",tick)?;
Ok(())
}

View File

@@ -1,35 +0,0 @@
use super::instance::Instance;
use super::tween_info::TweenInfo;
#[expect(dead_code)]
#[derive(Clone)]
pub struct Tween{
instance:Instance,
tween_info:TweenInfo,
goal:mlua::Table,
playback_state:rbx_types::Enum,
}
impl Tween{
pub fn create(
instance:Instance,
tween_info:TweenInfo,
goal:mlua::Table,
)->Self{
Self{
instance,
tween_info,
goal,
// Enum.PlaybackState.Begin
playback_state:rbx_types::Enum::from_u32(0),
}
}
pub fn play(&mut self){
}
}
impl mlua::UserData for Tween{
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
methods.add_method_mut("Play",|_,this,()|Ok(this.play()))
}
}
type_from_lua_userdata_clone!(Tween);

View File

@@ -1,45 +0,0 @@
use super::number::Number;
use super::r#enum::{CoerceEnum,Enums};
#[expect(dead_code)]
#[derive(Clone)]
pub struct TweenInfo{
time:f64,
easing_style:rbx_types::Enum,
easing_direction:rbx_types::Enum,
repeat_count:u32,
reverses:bool,
delay_time:f64,
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
table.raw_set("new",
lua.create_function(|_,(time,easing_style,easing_direction,repeat_count,reverses,delay_time):(Option<Number>,Option<CoerceEnum>,Option<CoerceEnum>,Option<u32>,Option<bool>,Option<Number>)|{
Ok(TweenInfo{
time:time.map_or(1.0,Number::to_f64),
easing_style:match easing_style{
// Enum.EasingStyle.Quad
None=>rbx_types::Enum::from_u32(3),
Some(e)=>e.coerce_to(Enums.get("EasingStyle").unwrap())?.into(),
},
easing_direction:match easing_direction{
// Enum.EasingDirection.Out
None=>rbx_types::Enum::from_u32(1),
Some(e)=>e.coerce_to(Enums.get("EasingDirection").unwrap())?.into(),
},
repeat_count:repeat_count.unwrap_or(0),
reverses:reverses.unwrap_or(false),
delay_time:delay_time.map_or(0.0,Number::to_f64),
})
})?
)?;
globals.set("TweenInfo",table)?;
Ok(())
}
impl mlua::UserData for TweenInfo{}
type_from_lua_userdata_clone!(TweenInfo);

View File

@@ -1,36 +0,0 @@
use super::number::Number;
#[derive(Clone,Copy)]
pub struct UDim(rbx_types::UDim);
impl UDim{
pub fn new(scale:f32,offset:i32)->Self{
UDim(rbx_types::UDim::new(scale,offset))
}
}
impl From<rbx_types::UDim> for UDim{
fn from(value:rbx_types::UDim)->Self{
Self(value)
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
table.raw_set("new",
lua.create_function(|_,(scale,offset):(Number,i32)|
Ok(UDim::new(scale.into(),offset))
)?
)?;
globals.set("UDim",table)?;
Ok(())
}
impl mlua::UserData for UDim{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Scale",|_,UDim(this)|Ok(this.scale));
fields.add_field_method_get("Offset",|_,UDim(this)|Ok(this.offset));
}
}
type_from_lua_userdata!(UDim);

View File

@@ -1,40 +0,0 @@
use super::udim::UDim;
use super::number::Number;
#[derive(Clone,Copy)]
pub struct UDim2(rbx_types::UDim2);
impl UDim2{
pub fn new(sx:f32,ox:i32,sy:f32,oy:i32)->Self{
UDim2(rbx_types::UDim2::new(
rbx_types::UDim::new(sx,ox),
rbx_types::UDim::new(sy,oy),
))
}
}
impl From<UDim2> for rbx_types::UDim2{
fn from(UDim2(value):UDim2)->rbx_types::UDim2{
value
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
table.raw_set("new",
lua.create_function(|_,(sx,ox,sy,oy):(Number,i32,Number,i32)|
Ok(UDim2::new(sx.into(),ox,sy.into(),oy))
)?
)?;
globals.set("UDim2",table)?;
Ok(())
}
impl mlua::UserData for UDim2{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("X",|_,UDim2(this)|Ok(UDim::from(this.x)));
fields.add_field_method_get("Y",|_,UDim2(this)|Ok(UDim::from(this.y)));
}
}
type_from_lua_userdata!(UDim2);

View File

@@ -1,93 +0,0 @@
use mlua::FromLua;
use super::number::Number;
#[derive(Clone,Copy)]
pub struct Vector2(glam::Vec2);
impl Vector2{
pub const fn new(x:f32,y:f32)->Self{
Self(glam::vec2(x,y))
}
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
//Vector2.new
table.raw_set("new",
lua.create_function(|_,(x,y):(Option<Number>,Option<Number>)|
match (x,y){
(Some(x),Some(y))=>Ok(Vector2::new(x.into(),y.into())),
(None,None)=>Ok(Vector2(glam::Vec2::ZERO)),
_=>Err(mlua::Error::runtime("Unsupported arguments to Vector2.new")),
}
)?
)?;
globals.set("Vector2",table)?;
Ok(())
}
impl From<Vector2> for rbx_types::Vector2{
fn from(Vector2(v):Vector2)->rbx_types::Vector2{
rbx_types::Vector2::new(v.x,v.y)
}
}
impl From<rbx_types::Vector2> for Vector2{
fn from(value:rbx_types::Vector2)->Vector2{
Vector2::new(value.x,value.y)
}
}
impl mlua::UserData for Vector2{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("magnitude",|_,Vector2(this)|Ok(this.length()));
fields.add_field_method_get("Magnitude",|_,Vector2(this)|Ok(this.length()));
fields.add_field_method_get("unit",|_,Vector2(this)|Ok(Vector2(this.normalize())));
fields.add_field_method_get("Unit",|_,Vector2(this)|Ok(Vector2(this.normalize())));
fields.add_field_method_get("x",|_,Vector2(this)|Ok(this.x));
fields.add_field_method_get("X",|_,Vector2(this)|Ok(this.x));
fields.add_field_method_get("y",|_,Vector2(this)|Ok(this.y));
fields.add_field_method_get("Y",|_,Vector2(this)|Ok(this.y));
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
//methods.add_method("area",|_,this,()| Ok(this.length * this.width));
methods.add_meta_function(mlua::MetaMethod::Add,|_,(Vector2(this),Vector2(val)):(Self,Self)|Ok(Self(this+val)));
methods.add_meta_function(mlua::MetaMethod::Sub,|_,(Vector2(this),Vector2(val)):(Self,Self)|Ok(Self(this-val)));
methods.add_meta_function(mlua::MetaMethod::Mul,|lua,(lhs,rhs):(mlua::Value,mlua::Value)|{
match (lhs,rhs){
(mlua::Value::UserData(lhs),mlua::Value::UserData(rhs))=>lhs.borrow_scoped(|Vector2(lhs):&Vector2|rhs.borrow_scoped(|Vector2(rhs):&Vector2|Self(lhs*rhs)))?,
(lhs,mlua::Value::UserData(rhs))=>{
let lhs=Number::from_lua(lhs,lua)?;
rhs.borrow_scoped(|Vector2(rhs):&Vector2|Self(lhs.to_f32()*rhs))
},
(mlua::Value::UserData(lhs),rhs)=>{
let rhs=Number::from_lua(rhs,lua)?;
lhs.borrow_scoped(|Vector2(lhs):&Vector2|Self(lhs*rhs.to_f32()))
},
_=>Err(mlua::Error::runtime(format!("Expected Vector2")))
}
});
methods.add_meta_function(mlua::MetaMethod::Div,|_,(Vector2(this),val):(Self,mlua::Value)|{
match val{
mlua::Value::Integer(n)=>Ok(Self(this/(n as f32))),
mlua::Value::Number(n)=>Ok(Self(this/(n as f32))),
mlua::Value::UserData(ud)=>ud.borrow_scoped(|Vector2(rhs):&Vector2|Self(this/rhs)),
other=>Err(mlua::Error::runtime(format!("Attempt to divide Vector2 by {other:?}"))),
}
});
methods.add_meta_function(mlua::MetaMethod::ToString,|_,Vector2(this):Self|
Ok(format!("Vector2.new({},{})",
this.x,
this.y,
))
);
}
}
type_from_lua_userdata!(Vector2);

View File

@@ -1,7 +1,3 @@
use mlua::FromLua;
use super::number::Number;
#[derive(Clone,Copy)]
pub struct Vector3(pub(crate)glam::Vec3A);
@@ -12,27 +8,23 @@ impl Vector3{
}
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let table=lua.create_table()?;
let vector3_table=lua.create_table()?;
//Vector3.new
table.raw_set("new",
lua.create_function(|_,(x,y,z):(Option<Number>,Option<Number>,Option<Number>)|
match (x,y,z){
(Some(x),Some(y),Some(z))=>Ok(Vector3::new(x.into(),y.into(),z.into())),
(None,None,None)=>Ok(Vector3(glam::Vec3A::ZERO)),
_=>Err(mlua::Error::runtime("Unsupported arguments to Vector3.new")),
}
vector3_table.raw_set("new",
lua.create_function(|_,(x,y,z):(f32,f32,f32)|
Ok(Vector3::new(x,y,z))
)?
)?;
globals.set("Vector3",table)?;
globals.set("Vector3",vector3_table)?;
Ok(())
}
impl From<Vector3> for rbx_types::Vector3{
fn from(Vector3(v):Vector3)->rbx_types::Vector3{
rbx_types::Vector3::new(v.x,v.y,v.z)
impl Into<rbx_types::Vector3> for Vector3{
fn into(self)->rbx_types::Vector3{
rbx_types::Vector3::new(self.0.x,self.0.y,self.0.z)
}
}
@@ -44,50 +36,44 @@ impl From<rbx_types::Vector3> for Vector3{
impl mlua::UserData for Vector3{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("magnitude",|_,Vector3(this)|Ok(this.length()));
fields.add_field_method_get("Magnitude",|_,Vector3(this)|Ok(this.length()));
fields.add_field_method_get("unit",|_,Vector3(this)|Ok(Vector3(this.normalize())));
fields.add_field_method_get("Unit",|_,Vector3(this)|Ok(Vector3(this.normalize())));
fields.add_field_method_get("x",|_,Vector3(this)|Ok(this.x));
fields.add_field_method_get("X",|_,Vector3(this)|Ok(this.x));
fields.add_field_method_get("y",|_,Vector3(this)|Ok(this.y));
fields.add_field_method_get("Y",|_,Vector3(this)|Ok(this.y));
fields.add_field_method_get("z",|_,Vector3(this)|Ok(this.z));
fields.add_field_method_get("Z",|_,Vector3(this)|Ok(this.z));
fields.add_field_method_get("magnitude",|_,this|Ok(this.0.length()));
fields.add_field_method_get("x",|_,this|Ok(this.0.x));
fields.add_field_method_set("x",|_,this,val|{
this.0.x=val;
Ok(())
});
fields.add_field_method_get("y",|_,this|Ok(this.0.y));
fields.add_field_method_set("y",|_,this,val|{
this.0.y=val;
Ok(())
});
fields.add_field_method_get("z",|_,this|Ok(this.0.z));
fields.add_field_method_set("z",|_,this,val|{
this.0.z=val;
Ok(())
});
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){
//methods.add_method("area",|_,this,()| Ok(this.length * this.width));
methods.add_meta_function(mlua::MetaMethod::Add,|_,(Vector3(this),Vector3(val)):(Self,Self)|Ok(Self(this+val)));
methods.add_meta_function(mlua::MetaMethod::Sub,|_,(Vector3(this),Vector3(val)):(Self,Self)|Ok(Self(this-val)));
methods.add_meta_function(mlua::MetaMethod::Mul,|lua,(lhs,rhs):(mlua::Value,mlua::Value)|{
match (lhs,rhs){
(mlua::Value::UserData(lhs),mlua::Value::UserData(rhs))=>lhs.borrow_scoped(|Vector3(lhs):&Vector3|rhs.borrow_scoped(|Vector3(rhs):&Vector3|Self(lhs*rhs)))?,
(lhs,mlua::Value::UserData(rhs))=>{
let lhs=Number::from_lua(lhs,lua)?;
rhs.borrow_scoped(|Vector3(rhs):&Vector3|Self(lhs.to_f32()*rhs))
},
(mlua::Value::UserData(lhs),rhs)=>{
let rhs=Number::from_lua(rhs,lua)?;
lhs.borrow_scoped(|Vector3(lhs):&Vector3|Self(lhs*rhs.to_f32()))
},
_=>Err(mlua::Error::runtime(format!("Expected Vector3")))
}
});
methods.add_meta_function(mlua::MetaMethod::Div,|_,(Vector3(this),val):(Self,mlua::Value)|{
methods.add_meta_function(mlua::MetaMethod::Add,|_,(this,val):(Self,Self)|Ok(Self(this.0+val.0)));
methods.add_meta_function(mlua::MetaMethod::Div,|_,(this,val):(Self,mlua::Value)|{
match val{
mlua::Value::Integer(n)=>Ok(Self(this/(n as f32))),
mlua::Value::Number(n)=>Ok(Self(this/(n as f32))),
mlua::Value::UserData(ud)=>ud.borrow_scoped(|Vector3(rhs):&Vector3|Self(this/rhs)),
mlua::Value::Integer(n)=>Ok(Self(this.0/(n as f32))),
mlua::Value::Number(n)=>Ok(Self(this.0/(n as f32))),
mlua::Value::UserData(ud)=>{
let rhs:Vector3=ud.take()?;
Ok(Self(this.0/rhs.0))
},
other=>Err(mlua::Error::runtime(format!("Attempt to divide Vector3 by {other:?}"))),
}
});
methods.add_meta_function(mlua::MetaMethod::ToString,|_,Vector3(this):Self|
methods.add_meta_function(mlua::MetaMethod::ToString,|_,this:Self|
Ok(format!("Vector3.new({},{},{})",
this.x,
this.y,
this.z,
this.0.x,
this.0.y,
this.0.z,
))
);
}

View File

@@ -46,12 +46,12 @@ impl Scheduler{
}
}
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut crate::scheduler::Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<crate::scheduler::Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
f(&mut *scheduler)
}
pub fn schedule_thread(lua:&mlua::Lua,dt:mlua::Value)->Result<(),mlua::Error>{
fn schedule_thread(lua:&mlua::Lua,dt:mlua::Value)->Result<(),mlua::Error>{
let delay=match dt{
mlua::Value::Integer(i)=>i.max(0) as u64*60,
mlua::Value::Number(f)=>{
@@ -75,3 +75,32 @@ pub fn schedule_thread(lua:&mlua::Lua,dt:mlua::Value)->Result<(),mlua::Error>{
Ok(())
})
}
// This is used to avoid calling coroutine.yield from the rust side.
const LUA_WAIT:&str=
"local coroutine_yield=coroutine.yield
local schedule_thread=schedule_thread
return function(dt)
schedule_thread(dt)
return coroutine_yield()
end";
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
let coroutine_table=globals.get::<mlua::Table>("coroutine")?;
let schedule_thread=lua.create_function(schedule_thread)?;
//create wait function environment
let wait_env=lua.create_table()?;
wait_env.raw_set("coroutine",coroutine_table)?;
wait_env.raw_set("schedule_thread",schedule_thread)?;
//construct wait function from Lua code
let wait=lua.load(LUA_WAIT)
.set_name("wait")
.set_environment(wait_env)
.call::<mlua::Function>(())?;
globals.raw_set("wait",wait)?;
Ok(())
}

View File

@@ -1,3 +0,0 @@
pub fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
}

View File

@@ -1,14 +1,11 @@
[package]
name = "strafesnet_snf"
version = "0.3.1"
version = "0.3.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
binrw = "0.15.0"
binrw = "0.14.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }

View File

@@ -6,7 +6,7 @@ use strafesnet_common::physics::Time;
const VERSION:u32=0;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,Time>;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
#[derive(Debug)]
pub enum Error{
@@ -85,7 +85,6 @@ pub struct Segment{
#[derive(Clone,Copy,Debug)]
pub struct SegmentInfo{
/// time of the first instruction in this segment.
#[expect(dead_code)]
time:Time,
instruction_count:u32,
/// How many total instructions in segments up to and including this segment
@@ -117,7 +116,6 @@ impl<R:BinReaderExt> StreamableBot<R>{
segment_map,
})
}
#[expect(dead_code)]
fn get_segment_info(&self,segment_id:SegmentId)->Result<SegmentInfo,Error>{
Ok(*self.segment_map.get(segment_id.get() as usize).ok_or(Error::InvalidSegmentId(segment_id))?)
}
@@ -274,7 +272,7 @@ pub fn write_bot<W:BinWriterExt>(mut writer:W,physics_version:u32,instructions:i
//probe header length
let mut bot_header_data=Vec::new();
header.write_le(&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
binrw::BinWrite::write_le(&header,&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
// the first block location is the map header
block_location.push(offset);

View File

@@ -53,6 +53,8 @@ pub(crate) enum FourCC{
Map,
#[brw(magic=b"SNFB")]
Bot,
#[brw(magic=b"SNFD")]
Demo,
}
#[binrw]
#[brw(little)]

View File

@@ -5,6 +5,7 @@ mod newtypes;
mod file;
pub mod map;
pub mod bot;
pub mod demo;
#[derive(Debug)]
pub enum Error{
@@ -12,6 +13,7 @@ pub enum Error{
Header(file::Error),
Map(map::Error),
Bot(bot::Error),
Demo(demo::Error),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -23,6 +25,7 @@ impl std::error::Error for Error{}
pub enum SNF<R:BinReaderExt>{
Map(map::StreamableMap<R>),
Bot(bot::StreamableBot<R>),
Demo(demo::StreamableDemo<R>),
}
pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
@@ -30,6 +33,7 @@ pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
Ok(match file.fourcc(){
file::FourCC::Map=>SNF::Map(map::StreamableMap::new(file).map_err(Error::Map)?),
file::FourCC::Bot=>SNF::Bot(bot::StreamableBot::new(file).map_err(Error::Bot)?),
file::FourCC::Demo=>SNF::Demo(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
})
}
pub fn read_map<R:BinReaderExt>(input:R)->Result<map::StreamableMap<R>,Error>{
@@ -46,6 +50,13 @@ pub fn read_bot<R:BinReaderExt>(input:R)->Result<bot::StreamableBot<R>,Error>{
_=>Err(Error::UnexpectedFourCC)
}
}
pub fn read_demo<R:BinReaderExt>(input:R)->Result<demo::StreamableDemo<R>,Error>{
let file=file::File::new(input).map_err(Error::Header)?;
match file.fourcc(){
file::FourCC::Demo=>Ok(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
_=>Err(Error::UnexpectedFourCC)
}
}
#[cfg(test)]
mod tests {

View File

@@ -97,8 +97,8 @@ enum ResourceType{
}
struct ResourceMap<T>{
meshes:HashMap<model::MeshId,T>,
textures:HashMap<model::TextureId,T>,
meshes:HashMap<strafesnet_common::model::MeshId,T>,
textures:HashMap<strafesnet_common::model::TextureId,T>,
}
impl<T> Default for ResourceMap<T>{
fn default()->Self{
@@ -185,7 +185,7 @@ pub struct StreamableMap<R:BinReaderExt>{
//this is every possible attribute... need some sort of streaming system
attributes:Vec<strafesnet_common::gameplay_attributes::CollisionAttributes>,
//this is every possible render configuration... shaders and such... need streaming
render_configs:Vec<model::RenderConfig>,
render_configs:Vec<strafesnet_common::model::RenderConfig>,
//this makes sense to keep in memory for streaming, a map of which blocks occupy what space
bvh:BvhNode<BlockId>,
//something something resources hashmaps
@@ -223,7 +223,7 @@ impl<R:BinReaderExt> StreamableMap<R>{
}
Ok(Self{
file,
modes:gameplay_modes::NormalizedModes::new(modes),
modes:strafesnet_common::gameplay_modes::NormalizedModes::new(modes),
attributes,
render_configs,
bvh:strafesnet_common::bvh::generate_bvh(bvh),
@@ -366,12 +366,12 @@ fn collect_spacial_blocks(
block_location.push(sequential_block_data.position());
}else{
match bvh_node.into_content(){
RecursiveContent::Branch(bvh_node_list)=>{
strafesnet_common::bvh::RecursiveContent::Branch(bvh_node_list)=>{
for bvh_node in bvh_node_list{
collect_spacial_blocks(block_location,block_headers,sequential_block_data,bvh_node)?;
}
},
RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
strafesnet_common::bvh::RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
}
}
Ok(())
@@ -384,13 +384,13 @@ pub fn write_map<W:BinWriterExt>(mut writer:W,map:strafesnet_common::map::Comple
let boxen=map.models.into_iter().enumerate().map(|(model_id,model)|{
//grow your own aabb
let mesh=map.meshes.get(model.mesh.get() as usize).ok_or(Error::InvalidMeshId(model.mesh))?;
let mut aabb=Aabb::default();
let mut aabb=strafesnet_common::aabb::Aabb::default();
for &pos in &mesh.unique_pos{
aabb.grow(model.transform.transform_point3(pos).narrow_1().unwrap());
}
Ok(((model::ModelId::new(model_id as u32),model.into()),aabb))
}).collect::<Result<Vec<_>,_>>()?;
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|size_of::<newtypes::model::Model>());
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|std::mem::size_of::<newtypes::model::Model>());
//build blocks
//block location is initialized with two values
//the first value represents the location of the first byte after the file header

View File

@@ -104,7 +104,6 @@ impl From<strafesnet_common::gameplay_style::StyleModifiers> for StyleModifiers{
#[binrw::binrw]
#[brw(little,repr=u8)]
#[expect(dead_code)]
pub enum JumpCalculation{
Max,
BoostThenJump,
@@ -129,7 +128,6 @@ impl From<strafesnet_common::gameplay_style::JumpCalculation> for JumpCalculatio
}
}
#[expect(dead_code)]
pub enum JumpImpulse{
Time(Time),
Height(Planar64),

View File

@@ -56,6 +56,8 @@ impl From<strafesnet_common::integer::Ratio64Vec2> for Ratio64Vec2{
}
}
pub type Angle32=i32;
pub type Planar64=i64;
pub type Planar64Vec3=[i64;3];
pub type Planar64Mat3=[i64;9];
pub type Planar64Affine3=[i64;12];

View File

@@ -20,7 +20,7 @@ impl TryInto<TimedPhysicsInstruction> for TimedInstruction{
}
}
impl TryFrom<TimedPhysicsInstruction> for TimedInstruction{
type Error=InstructionConvert;
type Error=super::physics::InstructionConvert;
fn try_from(value:TimedPhysicsInstruction)->Result<Self,Self::Error>{
Ok(Self{
time:value.time.get(),

View File

@@ -1,6 +1,6 @@
[package]
name = "map-tool"
version = "1.7.2"
version = "1.7.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,19 +12,20 @@ flate2 = "1.0.27"
futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
rbx_asset = { version = "0.5.0", registry = "strafesnet" }
rbx_binary = "2.0.1"
rbx_dom_weak = "4.1.0"
rbx_reflection_database = "2.0.2"
rbx_xml = "2.0.1"
lazy-regex = "3.1.0"
rbx_asset = { version = "0.2.5", registry = "strafesnet" }
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.1", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.7.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.1", path = "../lib/snf", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.0", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.6.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.0", path = "../lib/snf", registry = "strafesnet" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
vbsp = "0.9.1"
vbsp = "0.8.0"
vbsp-entities-css = "0.6.0"
vmdl = "0.2.0"
vmt-parser = "0.2.0"
@@ -35,6 +36,3 @@ vtf = "0.3.0"
#lto = true
#strip = true
#codegen-units = 1
[lints]
workspace = true

View File

@@ -8,11 +8,6 @@ use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
use rbxassetid::RobloxAssetId;
use tokio::io::AsyncReadExt;
// disallow non-static lifetimes
fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
}
const DOWNLOAD_LIMIT:usize=16;
#[derive(Subcommand)]
@@ -32,12 +27,8 @@ pub struct RobloxToSNFSubcommand {
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
// #[arg(long)]
// cookie_file:Option<String>,
}
impl Commands{
@@ -46,27 +37,13 @@ impl Commands{
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
Commands::DownloadAssets(subcommand)=>download_assets(
subcommand.roblox_files,
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
rbx_asset::cookie::Cookie::new("".to_string()),
).await,
}
}
}
async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<rbx_asset::cookie::Cookie>{
let cookie=match (literal,environment,file){
(Some(cookie_literal),None,None)=>cookie_literal,
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
};
Ok(rbx_asset::cookie::Cookie::new(cookie))
}
#[expect(dead_code)]
#[allow(unused)]
#[derive(Debug)]
enum LoadDomError{
IO(std::io::Error),
@@ -111,45 +88,17 @@ SurfaceAppearance.NormalMap
SurfaceAppearance.RoughnessMap
SurfaceAppearance.TexturePack
*/
/* These properties now use Content
BaseWrap.CageMeshContent
Decal.TextureContent
ImageButton.ImageContent
ImageLabel.ImageContent
MeshPart.MeshContent
MeshPart.TextureContent
SurfaceAppearance.ColorMapContent
SurfaceAppearance.MetalnessMapContent
SurfaceAppearance.NormalMapContent
SurfaceAppearance.RoughnessMapContent
WrapLayer.ReferenceMeshContent
*/
fn accumulate_content(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&'static str){
let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(&static_ustr(property))else{
fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&str){
if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
let url:&str=content.as_ref();
if let Ok(asset_id)=url.parse(){
content_list.insert(asset_id);
}else{
println!("Content failed to parse into AssetID: {:?}",content);
}
}else{
println!("property={} does not exist for class={}",property,object.class.as_str());
return;
};
let rbx_dom_weak::types::ContentType::Uri(uri)=content.value()else{
println!("ContentType is not Uri");
return;
};
let Ok(asset_id)=uri.parse()else{
println!("Content failed to parse into AssetID: {:?}",content);
return;
};
content_list.insert(asset_id);
}
fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&'static str){
let Some(rbx_dom_weak::types::Variant::ContentId(content))=object.properties.get(&static_ustr(property))else{
println!("property={} does not exist for class={}",property,object.class.as_str());
return;
};
let Ok(asset_id)=content.as_str().parse()else{
println!("Content failed to parse into AssetID: {:?}",content);
return;
};
content_list.insert(asset_id);
}
}
async fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let mut file=tokio::fs::File::open(path).await?;
@@ -167,12 +116,12 @@ impl UniqueAssets{
fn collect(&mut self,object:&Instance){
match object.class.as_str(){
"Beam"=>accumulate_content_id(&mut self.textures,object,"Texture"),
"Decal"=>accumulate_content(&mut self.textures,object,"TextureContent"),
"Texture"=>accumulate_content(&mut self.textures,object,"TextureContent"),
"Decal"=>accumulate_content_id(&mut self.textures,object,"Texture"),
"Texture"=>accumulate_content_id(&mut self.textures,object,"Texture"),
"FileMesh"=>accumulate_content_id(&mut self.textures,object,"TextureId"),
"MeshPart"=>{
accumulate_content(&mut self.textures,object,"TextureContent");
accumulate_content(&mut self.meshes,object,"MeshContent");
accumulate_content_id(&mut self.textures,object,"TextureID");
accumulate_content_id(&mut self.meshes,object,"MeshId");
},
"SpecialMesh"=>accumulate_content_id(&mut self.meshes,object,"MeshId"),
"ParticleEmitter"=>accumulate_content_id(&mut self.textures,object,"Texture"),
@@ -192,7 +141,7 @@ impl UniqueAssets{
}
}
#[expect(dead_code)]
#[allow(unused)]
#[derive(Debug)]
enum UniqueAssetError{
IO(std::io::Error),
@@ -216,16 +165,16 @@ enum DownloadType{
impl DownloadType{
fn path(&self)->PathBuf{
match self{
DownloadType::Texture(RobloxAssetId(asset_id))=>format!("downloaded_textures/{asset_id}").into(),
DownloadType::Mesh(RobloxAssetId(asset_id))=>format!("meshes/{asset_id}").into(),
DownloadType::Union(RobloxAssetId(asset_id))=>format!("unions/{asset_id}").into(),
DownloadType::Texture(asset_id)=>format!("downloaded_textures/{}",asset_id.0.to_string()).into(),
DownloadType::Mesh(asset_id)=>format!("meshes/{}",asset_id.0.to_string()).into(),
DownloadType::Union(asset_id)=>format!("unions/{}",asset_id.0.to_string()).into(),
}
}
fn asset_id(&self)->u64{
match self{
&DownloadType::Texture(RobloxAssetId(asset_id))=>asset_id,
&DownloadType::Mesh(RobloxAssetId(asset_id))=>asset_id,
&DownloadType::Union(RobloxAssetId(asset_id))=>asset_id,
DownloadType::Texture(asset_id)=>asset_id.0,
DownloadType::Mesh(asset_id)=>asset_id.0,
DownloadType::Union(asset_id)=>asset_id.0,
}
}
}
@@ -242,8 +191,9 @@ struct Stats{
failed_downloads:u32,
timed_out_downloads:u32,
}
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::CookieContext,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
stats.total_assets+=1;
let download_instruction=download_instruction;
// check if file exists on disk
let path=download_instruction.path();
if tokio::fs::try_exists(path.as_path()).await?{
@@ -263,12 +213,11 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
match asset_result{
Ok(asset_result)=>{
stats.downloaded_assets+=1;
let data=asset_result.to_vec()?;
tokio::fs::write(path,&data).await?;
break Ok(DownloadResult::Data(data));
tokio::fs::write(path,&asset_result).await?;
break Ok(DownloadResult::Data(asset_result));
},
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::Details{status_code,url_and_body}))=>{
if status_code.as_u16()==429{
Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
if scwuab.status_code.as_u16()==429{
if retry==12{
println!("Giving up asset download {asset_id}");
stats.timed_out_downloads+=1;
@@ -280,7 +229,7 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
retry+=1;
}else{
stats.failed_downloads+=1;
println!("weird status_code error: status_code={status_code} url={} body={}",url_and_body.url,url_and_body.body);
println!("weird scuwab error: {scwuab:?}");
break Ok(DownloadResult::Failed);
}
},
@@ -303,7 +252,7 @@ enum ConvertTextureError{
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
async fn convert_texture(RobloxAssetId(asset_id):RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
async fn convert_texture(asset_id:RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
let data=match download_result{
DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
DownloadResult::Data(data)=>data,
@@ -328,7 +277,7 @@ async fn convert_texture(RobloxAssetId(asset_id):RobloxAssetId,download_result:D
image_dds::Mipmaps::GeneratedAutomatic,
)?;
let file_name=format!("textures/{asset_id}.dds");
let file_name=format!("textures/{}.dds",asset_id.0);
let mut file=std::fs::File::create(file_name)?;
dds.write(&mut file)?;
Ok(())
@@ -366,7 +315,7 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
// insert into global unique assets guy
// add to download queue if the asset is globally unique and does not already exist on disk
let mut stats=Stats::default();
let context=rbx_asset::cookie::Context::new(cookie);
let context=rbx_asset::cookie::CookieContext::new(cookie);
let mut globally_unique_assets=UniqueAssets::default();
// pop a job = retry_queue.pop_front() or ingest(recv.recv().await)
// SLOW MODE:
@@ -421,7 +370,7 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
}
#[derive(Debug)]
#[expect(dead_code)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -434,23 +383,17 @@ impl std::fmt::Display for ConvertError{
}
}
impl std::error::Error for ConvertError{}
struct Errors{
script_errors:Vec<strafesnet_rbx_loader::RunnerError>,
convert_errors:strafesnet_rbx_loader::RecoverableErrors,
}
fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>{
let entire_file=std::fs::read(path).map_err(ConvertError::IO)?;
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let model=strafesnet_rbx_loader::read(
entire_file.as_slice()
std::io::Cursor::new(entire_file)
).map_err(ConvertError::RobloxRead)?;
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap_or_else(|e|vec![e]);
let mut place=model.into_place();
place.run_scripts();
let (map,convert_errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
@@ -459,37 +402,24 @@ fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(Errors{
script_errors,
convert_errors,
})
Ok(())
}
async fn roblox_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf)->AResult<()>{
async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
// This is wrong! Calling roblox_to_snf multiple times keeps adding permits
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::task::spawn_blocking(move||{
let result=convert_to_snf(path.as_path(),output_folder);
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),output_folder).await;
drop(permit);
match result{
Ok(errors)=>{
for error in errors.script_errors{
println!("Script error: {error}");
}
let error_count=errors.convert_errors.count();
if error_count!=0{
println!("Error count: {error_count}");
println!("Errors: {}",errors.convert_errors);
}
},
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
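A hedged sketch of one way to resolve the FIXME above about the static semaphore accumulating permits across calls: scope the semaphore to the function so every invocation starts with exactly thread_limit permits (convert_all is a hypothetical name, and the actual conversion call is omitted):

use std::sync::Arc;
async fn convert_all(paths:Vec<std::path::PathBuf>,thread_limit:usize){
	let sem=Arc::new(tokio::sync::Semaphore::new(thread_limit));
	for path in paths{
		// acquire_owned moves the permit into the blocking task
		let permit=sem.clone().acquire_owned().await.unwrap();
		tokio::task::spawn_blocking(move||{
			// the call to convert_to_snf would run here (omitted)
			let _=path;
			drop(permit);
		});
	}
}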

View File

@@ -452,7 +452,7 @@ fn bsp_contents(path:PathBuf)->AResult<()>{
}
#[derive(Debug)]
#[expect(dead_code)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -484,7 +484,7 @@ async fn convert_to_snf(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],output
Ok(())
}
async fn source_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();

View File

@@ -28,12 +28,5 @@ strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet", o
strafesnet_session = { path = "../engine/session", registry = "strafesnet" }
strafesnet_settings = { path = "../engine/settings", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet", optional = true }
wgpu = "28.0.0"
wgpu = "24.0.0"
winit = "0.30.7"
[profile.dev]
strip = false
opt-level = 3
[lints]
workspace = true

View File

@@ -2,12 +2,14 @@ pub type QNWorker<'a,Task>=CompatNWorker<'a,Task>;
pub type INWorker<'a,Task>=CompatNWorker<'a,Task>;
pub struct CompatNWorker<'a,Task>{
data:std::marker::PhantomData<Task>,
f:Box<dyn FnMut(Task)+Send+'a>,
}
impl<'a,Task> CompatNWorker<'a,Task>{
pub fn new(f:impl FnMut(Task)+Send+'a)->CompatNWorker<'a,Task>{
Self{
data:std::marker::PhantomData,
f:Box::new(f),
}
}
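This hunk stops before the rest of the impl block. Purely for orientation, a hypothetical sketch of the forwarding method such a wrapper implies — not taken from this diff, and the real name and signature may differ:

// Hypothetical: hand each task straight to the boxed closure.
impl<Task> CompatNWorker<'_,Task>{
    pub fn send(&mut self,task:Task){
        (self.f)(task)
    }
}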

View File

@@ -3,7 +3,7 @@ use std::io::Read;
#[cfg(any(feature="roblox",feature="source"))]
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum ReadError{
#[cfg(feature="roblox")]
@@ -63,7 +63,7 @@ pub fn read<R:Read+std::io::Seek>(input:R)->Result<ReadFormat,ReadError>{
}
}
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum LoadError{
ReadError(ReadError),
@@ -97,16 +97,11 @@ pub fn load<P:AsRef<std::path::Path>>(path:P)->Result<LoadFormat,LoadError>{
ReadFormat::SNFM(map)=>Ok(LoadFormat::Map(map)),
#[cfg(feature="roblox")]
ReadFormat::Roblox(model)=>{
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap();
for error in script_errors{
println!("Script error: {error}");
}
let (map,errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?;
if errors.count()!=0{
print!("Errors encountered while loading the map:\n{}",errors);
}
Ok(LoadFormat::Map(map))
let mut place=model.into_place();
place.run_scripts();
Ok(LoadFormat::Map(
place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?
))
},
#[cfg(feature="source")]
ReadFormat::Source(bsp)=>Ok(LoadFormat::Map(
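Per the hunk header above, load returns Result<LoadFormat,LoadError>. A minimal caller-side sketch; only load and LoadFormat::Map appear in this excerpt, everything else (function name, messages) is assumed:

// Assumed usage sketch; adjust the path to wherever `load` actually lives.
fn open_map(path:&std::path::Path){
    match load(path){
        Ok(LoadFormat::Map(_map))=>println!("map loaded"),
        Ok(_)=>println!("loaded, but not a map"),
        Err(e)=>println!("failed to load {}: {e:?}",path.display()),
    }
}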

View File

@@ -21,7 +21,7 @@ WorkerDescription{
pub fn new(
mut graphics:graphics::GraphicsState,
mut config:wgpu::SurfaceConfiguration,
surface:wgpu::Surface<'_>,
surface:wgpu::Surface,
device:wgpu::Device,
queue:wgpu::Queue,
)->crate::compat_worker::INWorker<'_,Instruction>{
@@ -34,8 +34,8 @@ pub fn new(
Instruction::Resize(size,user_settings)=>{
println!("Resizing to {:?}",size);
let t0=std::time::Instant::now();
config.width=640;
config.height=480;
config.width=size.width.max(1);
config.height=size.height.max(1);
surface.configure(&device,&config);
graphics.resize(&device,&config,&user_settings);
println!("Resize took {:?}",t0.elapsed());

View File

@@ -77,8 +77,5 @@ pub fn new<'a>(
run_session_instruction!(ins.time,SessionInstruction::LoadReplay(bot));
}
}
//whatever just do it
session.debug_raycast_print_model_id_if_changed(ins.time);
})
}

View File

@@ -52,7 +52,7 @@ impl<'a> SetupContextPartial2<'a>{
let required_features=required_features();
//no helper function smh gotta write it myself
let adapters=pollster::block_on(self.instance.enumerate_adapters(self.backends));
let adapters=self.instance.enumerate_adapters(self.backends);
let mut chosen_adapter=None;
let mut chosen_adapter_score=0;
@@ -117,16 +117,16 @@ impl<'a> SetupContextPartial3<'a>{
// Make sure we use the texture resolution limits from the adapter, so we can support images the size of the surface.
let needed_limits=strafesnet_graphics::graphics::required_limits().using_resolution(self.adapter.limits());
let trace_dir=std::env::var("WGPU_TRACE");
let (device, queue)=pollster::block_on(self.adapter
.request_device(
&wgpu::DeviceDescriptor{
label:None,
required_features:(optional_features&self.adapter.features())|required_features,
required_limits:needed_limits,
&wgpu::DeviceDescriptor {
label: None,
required_features: (optional_features & self.adapter.features()) | required_features,
required_limits: needed_limits,
memory_hints:wgpu::MemoryHints::Performance,
trace:wgpu::Trace::Off,
experimental_features:wgpu::ExperimentalFeatures::disabled(),
},
trace_dir.ok().as_ref().map(std::path::Path::new),
))
.expect("Unable to find a suitable GPU adapter!");

View File

@@ -86,27 +86,6 @@ fn vs_entity_texture(
return result;
}
@group(1)
@binding(0)
var<uniform> model_instance: ModelInstance;
struct DebugEntityOutput {
@builtin(position) position: vec4<f32>,
@location(1) normal: vec3<f32>,
@location(2) view: vec3<f32>,
};
@vertex
fn vs_debug(
@location(0) pos: vec3<f32>,
) -> DebugEntityOutput {
var position: vec4<f32> = model_instance.transform * vec4<f32>(pos, 1.0);
var result: DebugEntityOutput;
result.view = position.xyz - camera.view_inv[3].xyz;//col(3)
result.position = camera.proj * camera.view * position;
return result;
}
//group 2 is the skybox texture
@group(1)
@binding(0)
@@ -131,8 +110,3 @@ fn fs_entity_texture(vertex: EntityOutputTexture) -> @location(0) vec4<f32> {
let reflected_color = textureSample(cube_texture, cube_sampler, reflected).rgb;
return mix(vec4<f32>(vec3<f32>(0.05) + 0.2 * reflected_color,1.0),mix(vertex.model_color,vec4<f32>(fragment_color.rgb,1.0),fragment_color.a),0.5+0.5*abs(d));
}
@fragment
fn fs_debug(vertex: DebugEntityOutput) -> @location(0) vec4<f32> {
return model_instance.color;
}

Some files were not shown because too many files have changed in this diff.