Compare commits


1 Commit

Author SHA1 Message Date
d0f7799448 debug 2025-05-14 18:16:07 -07:00
77 changed files with 1889 additions and 2849 deletions

.gitignore (vendored): 1 line changed
View File

@@ -1,2 +1 @@
/target
.zed

Cargo.lock (generated): 1943 lines changed. File diff suppressed because it is too large.

View File

@@ -25,13 +25,6 @@ resolver = "2"
strip = true
codegen-units = 1
[workspace.lints.rust]
# unsafe_code = "forbid"
# missing_docs = "warn"
# missing_debug_implementations = "warn"
single_use_lifetimes = "warn"
trivial_casts = "warn"
unused_lifetimes = "warn"
unused_qualifications = "warn"
# variant_size_differences = "warn"
unexpected_cfgs = "warn"
[profile.dev]
strip = false
opt-level = 3

View File

@@ -11,7 +11,4 @@ id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_session = { path = "../session", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
wgpu = "27.0.0"
[lints]
workspace = true
wgpu = "25.0.0"

View File

@@ -94,7 +94,7 @@ impl GraphicsCamera{
raw
}
}
impl Default for GraphicsCamera{
impl std::default::Default for GraphicsCamera{
fn default()->Self{
Self{
screen_size:glam::UVec2::ONE,
@@ -167,7 +167,7 @@ impl GraphicsState{
}
pub fn generate_models(&mut self,device:&wgpu::Device,queue:&wgpu::Queue,map:&map::CompleteMap){
//generate texture view per texture
let texture_views:HashMap<model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_views:HashMap<strafesnet_common::model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_id=model::TextureId::new(texture_id as u32);
let image=match ddsfile::Dds::read(std::io::Cursor::new(texture_data)){
Ok(image)=>image,
@@ -803,7 +803,7 @@ impl GraphicsState{
module:&shader,
entry_point:Some("vs_entity_texture"),
buffers:&[wgpu::VertexBufferLayout{
array_stride:size_of::<GraphicsVertex>() as wgpu::BufferAddress,
array_stride:std::mem::size_of::<GraphicsVertex>() as wgpu::BufferAddress,
step_mode:wgpu::VertexStepMode::Vertex,
attributes:&wgpu::vertex_attr_array![0=>Float32x3,1=>Float32x2,2=>Float32x3,3=>Float32x4],
}],
@@ -953,7 +953,6 @@ impl GraphicsState{
}),
store:wgpu::StoreOp::Store,
},
depth_slice:None,
})],
depth_stencil_attachment:Some(wgpu::RenderPassDepthStencilAttachment{
view:&self.depth_view,

View File

@@ -8,6 +8,3 @@ arrayvec = "0.7.6"
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -18,17 +18,6 @@ impl<T> std::ops::Neg for Body<T>{
}
}
}
impl<T:Copy> std::ops::Neg for &Body<T>{
type Output=Body<T>;
fn neg(self)->Self::Output{
Body{
position:self.position,
velocity:-self.velocity,
acceleration:self.acceleration,
time:-self.time,
}
}
}
impl<T> Body<T>
where Time<T>:Copy,

View File

@@ -4,6 +4,7 @@ use crate::physics::{Time,Body};
use core::ops::Bound;
#[derive(Debug)]
enum Transition<M:MeshQuery>{
Miss,
Next(FEV<M>,GigaTime),
@@ -76,6 +77,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
M::Face:Copy,
M::Edge:Copy,
M::Vert:Copy,
M:std::fmt::Debug,
F:std::fmt::Display,
F:core::ops::Mul<Fixed<1,32>,Output=Fixed<4,128>>,
<F as core::ops::Mul<Fixed<1,32>>>::Output:core::iter::Sum,
M::Offset:core::ops::Sub<<F as std::ops::Mul<Fixed<1,32>>>::Output>,
@@ -90,10 +93,15 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//n=face.normal d=face.dot
//n.a t^2+n.v t+n.p-d==0
let (n,d)=mesh.face_nd(face_id);
println!("Face n={} d={}",n,d);
//TODO: use higher precision d value?
//use the mesh transform translation instead of baking it into the d value.
for dt in Fixed::<4,128>::zeroes2((n.dot(body.position)-d)*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
let low=low(&lower_bound,&dt);
let upp=upp(&dt,&upper_bound);
let into=n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative();
println!("dt={} low={low} upp={upp} into={into}",dt.divide());
if low&&upp&&into{
upper_bound=Bound::Included(dt);
best_transition=Transition::Hit(face_id,dt);
break;
@@ -119,19 +127,24 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
},
&FEV::Edge(edge_id)=>{
//test each face collision time, ignoring roots with zero or conflicting derivative
let edge_n=mesh.edge_n(edge_id);
let edge_verts=mesh.edge_verts(edge_id);
let &[ev0,ev1]=edge_verts.as_ref();
let (v0,v1)=(mesh.vert(ev0),mesh.vert(ev1));
let edge_n=v1-v0;
let delta_pos=body.position*2-(v0+v1);
let delta_pos=body.position*2-(mesh.vert(ev0)+mesh.vert(ev1));
for (i,&edge_face_id) in mesh.edge_faces(edge_id).as_ref().iter().enumerate(){
let face_n=mesh.face_nd(edge_face_id).0;
//edge_n gets parity from the order of edge_faces
let n=face_n.cross(edge_n)*((i as i64)*2-1);
let d=n.dot(delta_pos).wrap_4();
println!("Edge Face={:?} boundary_n={} boundary_d={}",edge_face_id,n,d>>1);
//WARNING yada yada d *2
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(n.dot(delta_pos).wrap_4(),n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
for dt in Fixed::<4,128>::zeroes2(d,n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
let low=low(&lower_bound,&dt);
let upp=upp(&dt,&upper_bound);
let into=n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative();
println!("dt={} low={low} upp={upp} into={into}",dt.divide());
if low&&upp&&into{
upper_bound=Bound::Included(dt);
best_transition=Transition::Next(FEV::Face(edge_face_id),dt);
break;
@@ -175,8 +188,11 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
pub fn crawl(mut self,mesh:&M,relative_body:&Body,lower_bound:Bound<&Time>,upper_bound:Bound<&Time>)->CrawlResult<M>{
let mut lower_bound=lower_bound.map(|&t|into_giga_time(t,relative_body.time));
let upper_bound=upper_bound.map(|&t|into_giga_time(t,relative_body.time));
println!("crawl begin={self:?}");
for _ in 0..20{
match self.next_transition(mesh,relative_body,lower_bound,upper_bound){
let transition=self.next_transition(mesh,relative_body,lower_bound,upper_bound);
println!("transition={transition:?}");
match transition{
Transition::Miss=>return CrawlResult::Miss(self),
Transition::Next(next_fev,next_time)=>(self,lower_bound)=(next_fev,Bound::Included(next_time)),
Transition::Hit(face,time)=>return CrawlResult::Hit(face,time),

View File

@@ -76,9 +76,9 @@ struct Face{
#[derive(Debug)]
struct Vert(Planar64Vec3);
pub trait MeshQuery{
type Face:Copy;
type Edge:Copy+DirectedEdge;
type Vert:Copy;
type Face:Copy+std::fmt::Debug;
type Edge:Copy+DirectedEdge+std::fmt::Debug;
type Vert:Copy+std::fmt::Debug;
// Vertex must be Planar64Vec3 because it represents an actual position
type Normal;
type Offset;
@@ -90,16 +90,6 @@ pub trait MeshQuery{
let &[v0,v1]=self.edge_verts(directed_edge_id.as_undirected()).as_ref();
(self.vert(v1)-self.vert(v0))*((directed_edge_id.parity() as i64)*2-1)
}
/// Returns an iterator over the vertices in the direction of the directed edges.
/// Intended to be used to find adjacent vertices:
/// `self.directed_verts(self.vert_edges(vert_id).as_ref())`
/// TODO: rewrite this function as `adjacent_vertices`
fn directed_verts(&self,edges:&[Self::Edge])->impl Iterator<Item=Self::Vert>{
edges.iter().map(|e|{
let edge_verts=self.edge_verts(e.as_undirected());
edge_verts.as_ref()[e.parity() as usize]
})
}
fn vert(&self,vert_id:Self::Vert)->Planar64Vec3;
fn face_nd(&self,face_id:Self::Face)->(Self::Normal,Self::Offset);
fn face_edges(&self,face_id:Self::Face)->impl AsRef<[Self::Edge]>;
@@ -245,7 +235,7 @@ impl PhysicsMesh{
}
}
#[inline]
pub fn complete_mesh_view(&self)->PhysicsMeshView<'_>{
pub fn complete_mesh_view(&self)->PhysicsMeshView{
PhysicsMeshView{
data:&self.data,
topology:self.complete_mesh(),
@@ -256,13 +246,13 @@ impl PhysicsMesh{
&self.submeshes
}
#[inline]
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView<'_>{
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView{
PhysicsMeshView{
data:&self.data,
topology:&self.submeshes()[submesh_id.get() as usize],
}
}
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView<'_>>{
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView>{
self.submeshes().iter().map(|topology|PhysicsMeshView{
data:&self.data,
topology,
@@ -462,8 +452,6 @@ impl MeshQuery for PhysicsMeshView<'_>{
let vert_idx=self.topology.verts[vert_id.get() as usize].get() as usize;
self.data.verts[vert_idx].0
}
/// Directed edges going clockwise when looking in the direction of the face normal.
/// (Edit this documentation if this is wrong!)
fn face_edges(&self,face_id:SubmeshFaceId)->impl AsRef<[SubmeshDirectedEdgeId]>{
self.topology.face_topology[face_id.get() as usize].edges.as_slice()
}
@@ -512,9 +500,12 @@ impl TransformedMesh<'_>{
transform,
}
}
pub fn verts<'a>(&'a self)->impl Iterator<Item=Vector3<Fixed<2,64>>>+'a{
pub fn verts<'a>(&'a self)->impl Iterator<Item=vec3::Vector3<Fixed<2,64>>>+'a{
self.view.data.verts.iter().map(|&Vert(pos)|self.transform.vertex.transform_point3(pos))
}
pub fn faces(&self)->impl Iterator<Item=SubmeshFaceId>{
(0..self.view.topology.faces.len() as u32).map(SubmeshFaceId::new)
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual vertices
SubmeshVertId::new(
@@ -626,6 +617,17 @@ pub struct MinkowskiMesh<'a>{
mesh1:TransformedMesh<'a>,
}
//infinity fev algorithm state transition
#[derive(Debug)]
enum Transition{
Done,//found closest vert, no edges are better
Vert(MinkowskiVert),//transition to vert
}
enum EV{
Vert(MinkowskiVert),
Edge(MinkowskiEdge),
}
pub type GigaTime=Ratio<Fixed<4,128>,Fixed<4,128>>;
pub fn into_giga_time(time:Time,relative_to:Time)->GigaTime{
let r=(time-relative_to).to_ratio();
@@ -642,195 +644,124 @@ impl MinkowskiMesh<'_>{
fn farthest_vert(&self,dir:Planar64Vec3)->MinkowskiVert{
MinkowskiVert::VertVert(self.mesh0.farthest_vert(dir),self.mesh1.farthest_vert(-dir))
}
fn closest_fev_not_inside(&self,relative_position:Planar64Vec3)->Option<FEV<MinkowskiMesh<'_>>>{
// Make a fast guess as to what the closest point will be.
let MinkowskiVert::VertVert(mut v0,mut v1)=self.farthest_vert(relative_position);
// TODO: alternate naive vertex searches to improve the robustness
// in the "tall bipyramid" failure case
let mut m0v=self.mesh0.vert(v0);
let mut m1v=self.mesh1.vert(v1);
let mut best_distance_squared={
let diff=relative_position+m1v-m0v;
diff.dot(diff)
};
let mut v0e=self.mesh0.vert_edges(v0);
let mut v1e=self.mesh1.vert_edges(v1);
let mut v0e_ref=v0e.as_ref();
let mut v1e_ref=v1e.as_ref();
// repeatedly check adjacent vertex permutations to see if they are closer
loop{
let mut best_v0=None;
let mut best_v1=None;
// check vertices adjacent to v1 against v0
for m1_test_vert in self.mesh1.directed_verts(v1e_ref){
let m1v_test=self.mesh1.vert(m1_test_vert);
let diff=relative_position+m1v_test-m0v;
let d=diff.dot(diff);
if d<best_distance_squared{
best_distance_squared=d;
best_v0=None;
best_v1=Some(m1_test_vert);
fn next_transition_vert(&self,vert_id:MinkowskiVert,best_distance_squared:&mut Fixed<2,64>,infinity_dir:Planar64Vec3,point:Planar64Vec3)->Transition{
let mut best_transition=Transition::Done;
for &directed_edge_id in self.vert_edges(vert_id).as_ref(){
let edge_n=self.directed_edge_n(directed_edge_id);
//is boundary uncrossable by a crawl from infinity
let edge_verts=self.edge_verts(directed_edge_id.as_undirected());
//select opposite vertex
let test_vert_id=edge_verts.as_ref()[directed_edge_id.parity() as usize];
//test if it's closer
let diff=point-self.vert(test_vert_id);
if edge_n.dot(infinity_dir).is_zero(){
let distance_squared=diff.dot(diff);
if distance_squared<*best_distance_squared{
best_transition=Transition::Vert(test_vert_id);
*best_distance_squared=distance_squared;
}
}
// check vertices adjacent to v0 against v1
for m0_test_vert in self.mesh0.directed_verts(v0e_ref){
let m0v_test=self.mesh0.vert(m0_test_vert);
let diff=relative_position+m1v-m0v_test;
let d=diff.dot(diff);
if d<best_distance_squared{
best_distance_squared=d;
best_v0=Some(m0_test_vert);
best_v1=None;
}
}
// check permutations of adjacent vertices
for m0_test_vert in self.mesh0.directed_verts(v0e_ref){
let m0v_test=self.mesh0.vert(m0_test_vert);
for m1_test_vert in self.mesh1.directed_verts(v1e_ref){
let m1v_test=self.mesh1.vert(m1_test_vert);
let diff=relative_position+m1v_test-m0v_test;
let d=diff.dot(diff);
if d<best_distance_squared{
best_distance_squared=d;
best_v0=Some(m0_test_vert);
best_v1=Some(m1_test_vert);
}
best_transition
}
fn final_ev(&self,vert_id:MinkowskiVert,best_distance_squared:&mut Fixed<2,64>,infinity_dir:Planar64Vec3,point:Planar64Vec3)->EV{
let mut best_transition=EV::Vert(vert_id);
let diff=point-self.vert(vert_id);
for &directed_edge_id in self.vert_edges(vert_id).as_ref(){
let edge_n=self.directed_edge_n(directed_edge_id);
//is boundary uncrossable by a crawl from infinity
//check if time of collision is outside Time::MIN..Time::MAX
if edge_n.dot(infinity_dir).is_zero(){
let d=edge_n.dot(diff);
//test the edge
let edge_nn=edge_n.dot(edge_n);
if !d.is_negative()&&d<=edge_nn{
let distance_squared={
let c=diff.cross(edge_n);
//wrap for speed
(c.dot(c)/edge_nn).divide().wrap_2()
};
if distance_squared<=*best_distance_squared{
best_transition=EV::Edge(directed_edge_id.as_undirected());
*best_distance_squared=distance_squared;
}
}
}
// end condition
let v0_changed=match best_v0{
Some(new_v0)=>{
v0=new_v0;
m0v=self.mesh0.vert(v0);
v0e=self.mesh0.vert_edges(v0);
v0e_ref=v0e.as_ref();
true
},
None=>false,
};
let v1_changed=match best_v1{
Some(new_v1)=>{
v1=new_v1;
m1v=self.mesh1.vert(v1);
v1e=self.mesh1.vert_edges(v1);
v1e_ref=v1e.as_ref();
true
},
None=>false,
};
// if neither vertex changes, we found the closest two vertices.
if !(v0_changed&&v1_changed){
break;
}
best_transition
}
fn crawl_boundaries(&self,mut vert_id:MinkowskiVert,infinity_dir:Planar64Vec3,point:Planar64Vec3)->EV{
let mut best_distance_squared={
let diff=point-self.vert(vert_id);
diff.dot(diff)
};
loop{
match self.next_transition_vert(vert_id,&mut best_distance_squared,infinity_dir,point){
Transition::Done=>return self.final_ev(vert_id,&mut best_distance_squared,infinity_dir,point),
Transition::Vert(new_vert_id)=>vert_id=new_vert_id,
}
}
// now you have the two closest vertices
let mut best_fev=FEV::Vert(MinkowskiVert::VertVert(v0,v1));
// ==== FEV::Edge ====
// test VertEdges
for &e1 in v1e_ref{
let edge_verts=self.mesh1.edge_verts(e1.as_undirected());
let &[ev0_id,ev1_id]=edge_verts.as_ref();
let (ev0,ev1)=(self.mesh1.vert(ev0_id),self.mesh1.vert(ev1_id));
let edge_n=ev1-ev0;
// use relative coordinates to make including relative_position easier
let diff=relative_position+m0v-ev0;
let d=diff.dot(edge_n);
// is test point between edge vertices
let edge_nn=edge_n.dot(edge_n);
if d.is_positive()&&d<edge_nn{
let distance_squared={
let c=diff.cross(edge_n);
//wrap for speed
(c.dot(c)/edge_nn).divide().wrap_2()
}
/// This function drops a vertex down to an edge or a face if the path from infinity did not cross any vertex-edge boundaries but the point is supposed to have already crossed a boundary down from a vertex
fn infinity_fev(&self,infinity_dir:Planar64Vec3,point:Planar64Vec3)->FEV::<MinkowskiMesh>{
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
match self.crawl_boundaries(self.farthest_vert(infinity_dir),infinity_dir,point){
//if a vert is returned, it is the closest point to the infinity point
EV::Vert(vert_id)=>FEV::Vert(vert_id),
EV::Edge(edge_id)=>{
//cross to face if the boundary is not crossable and we are on the wrong side
let edge_n=self.edge_n(edge_id);
// point is multiplied by two because vert_sum sums two vertices.
let delta_pos=point*2-{
let &[v0,v1]=self.edge_verts(edge_id).as_ref();
self.vert(v0)+self.vert(v1)
};
if distance_squared<best_distance_squared{
best_distance_squared=distance_squared;
best_fev=FEV::Edge(MinkowskiEdge::VertEdge(v0,e1.as_undirected()))
for (i,&face_id) in self.edge_faces(edge_id).as_ref().iter().enumerate(){
let face_n=self.face_nd(face_id).0;
//edge-face boundary nd, n facing out of the face towards the edge
let boundary_n=face_n.cross(edge_n)*(i as i64*2-1);
let boundary_d=boundary_n.dot(delta_pos);
//check if time of collision is outside Time::MIN..Time::MAX
//infinity_dir can always be treated as a velocity
if !boundary_d.is_positive()&&boundary_n.dot(infinity_dir).is_zero(){
//both faces cannot pass this condition, return early if one does.
return FEV::Face(face_id);
}
}
}
FEV::Edge(edge_id)
},
}
// test EdgeVerts
for &e0 in v0e_ref{
let edge_verts=self.mesh0.edge_verts(e0.as_undirected());
let &[ev0_id,ev1_id]=edge_verts.as_ref();
let (ev0,ev1)=(self.mesh0.vert(ev0_id),self.mesh0.vert(ev1_id));
let edge_n=ev1-ev0;
// use relative coordinates to make including relative_position easier
let diff=m1v-relative_position-ev0;
let d=diff.dot(edge_n);
// is test point between edge vertices
let edge_nn=edge_n.dot(edge_n);
if d.is_positive()&&d<edge_nn{
let distance_squared={
let c=diff.cross(edge_n);
//wrap for speed
(c.dot(c)/edge_nn).divide().wrap_2()
};
if distance_squared<best_distance_squared{
best_distance_squared=distance_squared;
best_fev=FEV::Edge(MinkowskiEdge::EdgeVert(e0.as_undirected(),v1))
}
}
}
// ==== FEV::Face ====
// test VertFaces
'outer: for &f1 in self.mesh1.vert_faces(v1).as_ref(){
let (n,d)=self.mesh1.face_nd(f1);
// Test the face's voronoi column
for &e1 in self.mesh1.face_edges(f1).as_ref(){
let edge_n=self.mesh1.directed_edge_n(e1);
let boundary_n=n.cross(edge_n);
let &[ev0_id,ev1_id]=self.mesh1.edge_verts(e1.as_undirected()).as_ref();
let (ev0,ev1)=(self.mesh1.vert(ev0_id),self.mesh1.vert(ev1_id));
let diff=(relative_position+m0v)*2-(ev0+ev1);
if boundary_n.dot(diff).is_negative(){
// The test point is outside the face's voronoi column.
continue 'outer;
}
}
// Calculate distance
let d=n.dot(relative_position+m0v)-d;
// Wrap for speed
let distance_squared=(d*d).wrap_2();
if distance_squared<best_distance_squared{
best_distance_squared=distance_squared;
best_fev=FEV::Face(MinkowskiFace::VertFace(v0,f1));
}
}
// test FaceVerts
'outer: for &f0 in self.mesh0.vert_faces(v0).as_ref(){
let (n,d)=self.mesh0.face_nd(f0);
// Test the face's voronoi column
for &e0 in self.mesh0.face_edges(f0).as_ref(){
let edge_n=self.mesh0.directed_edge_n(e0);
let boundary_n=n.cross(edge_n);
let &[ev0_id,ev1_id]=self.mesh0.edge_verts(e0.as_undirected()).as_ref();
let (ev0,ev1)=(self.mesh0.vert(ev0_id),self.mesh0.vert(ev1_id));
let diff=(m1v-relative_position)*2-(ev0+ev1);
if boundary_n.dot(diff).is_negative(){
// The test point is outside the face's voronoi column.
continue 'outer;
}
}
// Calculate distance
let d=n.dot(relative_position+m0v)-d;
// Wrap for speed
let distance_squared=(d*d).wrap_2();
if distance_squared<best_distance_squared{
best_distance_squared=distance_squared;
best_fev=FEV::Face(MinkowskiFace::FaceVert(f0,v1));
}
}
// test EdgeEdges
Some(best_fev)
}
// TODO: fundamentally improve this algorithm.
// All it needs to do is find the closest point on the mesh
// and return the FEV which the point resides on.
//
// What it actually does is use the above functions to trace a ray in from infinity,
// crawling the closest point along the mesh surface until the ray reaches
// the starting point to discover the final FEV.
//
// The actual collision prediction probably does a single test
// and then immediately returns with 0 FEV transitions on average,
// because of the strict time_limit constraint.
//
// Most of the calculation time is just calculating the starting point
// for the "actual" crawling algorithm below (predict_collision_{in|out}).
fn closest_fev_not_inside(&self,mut infinity_body:Body,start_time:Bound<&Time>)->Option<FEV<MinkowskiMesh>>{
infinity_body.infinity_dir().and_then(|dir|{
let infinity_fev=self.infinity_fev(-dir,infinity_body.position);
//a line is simpler to solve than a parabola
infinity_body.velocity=dir;
infinity_body.acceleration=vec3::ZERO;
//crawl in from negative infinity along a tangent line to get the closest fev
infinity_fev.crawl(self,&infinity_body,Bound::Unbounded,start_time).miss()
})
}
pub fn predict_collision_in(&self,relative_body:&Body,range:impl RangeBounds<Time>)->Option<(MinkowskiFace,GigaTime)>{
self.closest_fev_not_inside(relative_body.position).and_then(|fev|{
println!("@@@BEGIN SETUP@@@");
self.closest_fev_not_inside(relative_body.clone(),range.start_bound()).and_then(|fev|{
println!("@@@BEGIN REAL CRAWL@@@");
//continue forwards along the body parabola
fev.crawl(self,relative_body,range.start_bound(),range.end_bound()).hit()
})
@@ -839,9 +770,10 @@ impl MinkowskiMesh<'_>{
let (lower_bound,upper_bound)=(range.start_bound(),range.end_bound());
// swap and negate bounds to do a time inversion
let (lower_bound,upper_bound)=(upper_bound.map(|&t|-t),lower_bound.map(|&t|-t));
self.closest_fev_not_inside(relative_body.position).and_then(|fev|{
let infinity_body=-relative_body.clone();
self.closest_fev_not_inside(infinity_body,lower_bound.as_ref()).and_then(|fev|{
//continue backwards along the body parabola
fev.crawl(self,&-relative_body,lower_bound.as_ref(),upper_bound.as_ref()).hit()
fev.crawl(self,&infinity_body,lower_bound.as_ref(),upper_bound.as_ref()).hit()
//no need to test -time<time_limit because of the first step
.map(|(face,time)|(face,-time))
})
@@ -851,7 +783,10 @@ impl MinkowskiMesh<'_>{
use crate::face_crawler::{low,upp};
//no algorithm needed, there is only one state and two cases (Edge,None)
//determine when it passes an edge ("sliding off" case)
let start_time=range.start_bound().map(|&t|(t-relative_body.time).to_ratio());
let start_time=range.start_bound().map(|&t|{
let r=(t-relative_body.time).to_ratio();
Ratio::new(r.num,r.den)
});
let mut best_time=range.end_bound().map(|&t|into_giga_time(t,relative_body.time));
let mut best_edge=None;
let face_n=self.face_nd(contact_face_id).0;
@@ -874,10 +809,20 @@ impl MinkowskiMesh<'_>{
}
best_edge
}
fn infinity_in(&self,infinity_body:Body)->Option<(MinkowskiFace,GigaTime)>{
let infinity_fev=self.infinity_fev(-infinity_body.velocity,infinity_body.position);
// Bound::Included means that the surface of the mesh is included in the mesh
infinity_fev.crawl(self,&infinity_body,Bound::Unbounded,Bound::Included(&infinity_body.time)).hit()
}
pub fn is_point_in_mesh(&self,point:Planar64Vec3)->bool{
// TODO
println!("Unimplemented is_point_in_mesh called! {point}");
false
let infinity_body=Body::new(point,vec3::Y,vec3::ZERO,Time::ZERO);
//movement must escape the mesh forwards and backwards in time,
//otherwise the point is not inside the mesh
self.infinity_in(infinity_body)
.is_some_and(|_|
self.infinity_in(-infinity_body)
.is_some()
)
}
}
impl MeshQuery for MinkowskiMesh<'_>{
@@ -1064,8 +1009,7 @@ impl MeshQuery for MinkowskiMesh<'_>{
}
fn vert_faces(&self,_vert_id:MinkowskiVert)->impl AsRef<[MinkowskiFace]>{
unimplemented!();
#[expect(unreachable_code)]
Vec::new()
vec![]
}
}

View File

@@ -23,7 +23,7 @@ use strafesnet_common::physics::{Instruction,MouseInstruction,ModeInstruction,Mi
//internal influence
//when the physics asks itself what happens next, this is how it's represented
#[derive(Debug,Clone)]
#[derive(Debug)]
pub enum InternalInstruction{
CollisionStart(Collision,model_physics::GigaTime),
CollisionEnd(Collision,model_physics::GigaTime),
@@ -36,7 +36,7 @@ pub enum InternalInstruction{
pub struct InputState{
mouse:MouseState,
next_mouse:MouseState,
controls:Controls,
controls:strafesnet_common::controls_bitflag::Controls,
}
impl InputState{
fn set_next_mouse(&mut self,next_mouse:MouseState){
@@ -100,7 +100,6 @@ enum TransientAcceleration{
time:Time,
},
//walk target will never be reached
#[expect(dead_code)]
Unreachable{
acceleration:Planar64Vec3,
}
@@ -185,7 +184,6 @@ struct PhysicsModels{
intersect_attributes:HashMap<IntersectAttributesId,gameplay_attributes::IntersectAttributes>,
}
impl PhysicsModels{
#[expect(dead_code)]
fn clear(&mut self){
self.meshes.clear();
self.contact_models.clear();
@@ -193,7 +191,7 @@ impl PhysicsModels{
self.contact_attributes.clear();
self.intersect_attributes.clear();
}
fn mesh(&self,convex_mesh_id:ConvexMeshId)->TransformedMesh<'_>{
fn mesh(&self,convex_mesh_id:ConvexMeshId)->TransformedMesh{
let (mesh_id,transform)=match convex_mesh_id.model_id{
PhysicsModelId::Contact(model_id)=>{
let model=&self.contact_models[&model_id];
@@ -210,14 +208,14 @@ impl PhysicsModels{
)
}
//it's a bit weird to have three functions, but it's always going to be one of these
fn contact_mesh(&self,contact:&ContactCollision)->TransformedMesh<'_>{
fn contact_mesh(&self,contact:&ContactCollision)->TransformedMesh{
let model=&self.contact_models[&contact.model_id];
TransformedMesh::new(
self.meshes[&model.mesh_id].submesh_view(contact.submesh_id),
&model.transform
)
}
fn intersect_mesh(&self,intersect:&IntersectCollision)->TransformedMesh<'_>{
fn intersect_mesh(&self,intersect:&IntersectCollision)->TransformedMesh{
let model=&self.intersect_models[&intersect.model_id];
TransformedMesh::new(
self.meshes[&model.mesh_id].submesh_view(intersect.submesh_id),
@@ -286,7 +284,6 @@ impl PhysicsCamera{
fn rotation(&self)->Planar64Mat3{
self.get_rotation(self.clamped_mouse_pos)
}
#[expect(dead_code)]
fn simulate_move_rotation(&self,mouse_delta:glam::IVec2)->Planar64Mat3{
self.get_rotation(self.clamped_mouse_pos+mouse_delta)
}
@@ -302,7 +299,7 @@ impl PhysicsCamera{
}
}
impl Default for PhysicsCamera{
impl std::default::Default for PhysicsCamera{
fn default()->Self{
Self{
sensitivity:Ratio64Vec2::ONE*200_000,
@@ -354,7 +351,6 @@ mod gameplay{
pub fn unordered_checkpoint_count(&self)->u32{
self.unordered_checkpoints.len() as u32
}
#[expect(dead_code)]
pub fn set_mode_id(&mut self,mode_id:gameplay_modes::ModeId){
self.clear();
self.mode_id=mode_id;
@@ -385,7 +381,7 @@ mod gameplay{
self.unordered_checkpoints.clear();
}
}
impl Default for ModeState{
impl std::default::Default for ModeState{
fn default()->Self{
Self{
mode_id:gameplay_modes::ModeId::MAIN,
@@ -421,7 +417,7 @@ impl HitboxMesh{
}
}
#[inline]
fn transformed_mesh(&self)->TransformedMesh<'_>{
fn transformed_mesh(&self)->TransformedMesh{
TransformedMesh::new(self.mesh.complete_mesh_view(),&self.transform)
}
}
@@ -489,7 +485,6 @@ enum MoveState{
Air,
Walk(ContactMoveState),
Ladder(ContactMoveState),
#[expect(dead_code)]
Water,
Fly,
}
@@ -868,15 +863,6 @@ impl Default for PhysicsState{
}
impl PhysicsState{
pub fn new_with_body(body:Body)->Self{
Self{
body,
..Self::default()
}
}
pub const fn body(&self)->&Body{
&self.body
}
pub fn camera_body(&self)->Body{
Body{
position:self.body.position+self.style.camera_offset,
@@ -889,9 +875,6 @@ impl PhysicsState{
pub const fn mode(&self)->gameplay_modes::ModeId{
self.mode_state.get_mode_id()
}
pub const fn style_mut(&mut self)->&mut StyleModifiers{
&mut self.style
}
pub fn get_finish_time(&self)->Option<run::Time>{
self.run.get_finish_time()
}
@@ -955,8 +938,8 @@ pub struct PhysicsData{
//cached calculations
hitbox_mesh:HitboxMesh,
}
impl PhysicsData{
pub fn empty()->Self{
impl Default for PhysicsData{
fn default()->Self{
Self{
bvh:bvh::BvhNode::empty(),
models:Default::default(),
@@ -964,7 +947,47 @@ impl PhysicsData{
hitbox_mesh:StyleModifiers::default().calculate_mesh(),
}
}
pub fn new(map:&map::CompleteMap)->Self{
}
// the collection of information required to run physics
pub struct PhysicsContext<'a>{
state:&'a mut PhysicsState,//this captures the entire state of the physics.
data:&'a PhysicsData,//data currently loaded into memory which is needed for physics to run, but is not part of the state.
}
// the physics consumes both Instruction and PhysicsInternalInstruction,
// but can only emit PhysicsInternalInstruction
impl InstructionConsumer<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,Time>){
atomic_internal_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionConsumer<Instruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Time>){
atomic_input_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionEmitter<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
//this little next instruction function could cache its return value and invalidate the cached value by watching the State.
fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
next_instruction_internal(&self.state,&self.data,time_limit)
}
}
impl PhysicsContext<'_>{
pub fn run_input_instruction(
state:&mut PhysicsState,
data:&PhysicsData,
instruction:TimedInstruction<Instruction,Time>
){
let mut context=PhysicsContext{state,data};
context.process_exhaustive(instruction.time);
context.process_instruction(instruction);
}
}
impl PhysicsData{
/// use with caution, this is the only non-instruction way to mess with physics
pub fn generate_models(&mut self,map:&map::CompleteMap){
let modes=map.modes.clone().denormalize();
let mut used_contact_attributes=Vec::new();
let mut used_intersect_attributes=Vec::new();
@@ -1091,57 +1114,11 @@ impl PhysicsData{
(IntersectAttributesId::new(attr_id as u32),attr)
).collect(),
};
self.bvh=bvh;
self.models=models;
self.modes=modes;
//hitbox_mesh is unchanged
println!("Physics Objects: {}",model_count);
Self{
hitbox_mesh:StyleModifiers::default().calculate_mesh(),
bvh,
models,
modes,
}
}
}
// the collection of information required to run physics
pub struct PhysicsContext<'a>{
state:&'a mut PhysicsState,//this captures the entire state of the physics.
data:&'a PhysicsData,//data currently loaded into memory which is needed for physics to run, but is not part of the state.
}
// the physics consumes both Instruction and PhysicsInternalInstruction,
// but can only emit PhysicsInternalInstruction
impl InstructionConsumer<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,Time>){
atomic_internal_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionConsumer<Instruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Time>){
atomic_input_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionEmitter<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
//this little next instruction function could cache its return value and invalidate the cached value by watching the State.
fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
next_instruction_internal(&self.state,&self.data,time_limit)
}
}
impl<'a> PhysicsContext<'a>{
pub fn run_input_instruction(
state:&mut PhysicsState,
data:&PhysicsData,
instruction:TimedInstruction<Instruction,Time>
){
let mut context=PhysicsContext{state,data};
context.process_exhaustive(instruction.time);
context.process_instruction(instruction);
}
pub fn iter_internal(
state:&'a mut PhysicsState,
data:&'a PhysicsData,
time_limit:Time,
)->instruction::InstructionIter<InternalInstruction,Time,Self>{
PhysicsContext{state,data}.into_iter(time_limit)
}
}
@@ -1287,7 +1264,6 @@ fn set_velocity_cull(body:&mut Body,touching:&mut TouchingState,models:&PhysicsM
fn set_velocity(body:&mut Body,touching:&TouchingState,models:&PhysicsModels,hitbox_mesh:&HitboxMesh,v:Planar64Vec3){
body.velocity=touching.constrain_velocity(models,hitbox_mesh,v);
}
#[expect(dead_code)]
fn set_acceleration_cull(body:&mut Body,touching:&mut TouchingState,models:&PhysicsModels,hitbox_mesh:&HitboxMesh,a:Planar64Vec3)->bool{
//This is not correct but is better than what I have
let mut culled=false;
@@ -1928,7 +1904,7 @@ mod test{
fn test_collision_rotated(relative_body:Body,expected_collision_time:Option<Time>){
let h0=HitboxMesh::new(PhysicsMesh::unit_cube(),
integer::Planar64Affine3::new(
Planar64Mat3::from_cols([
integer::Planar64Mat3::from_cols([
int3(5,0,1)>>1,
int3(0,1,0)>>1,
int3(-1,0,5)>>1,
@@ -1944,7 +1920,7 @@ mod test{
assert_eq!(collision.map(|tup|relative_body.time+tup.1.into()),expected_collision_time,"Incorrect time of collision");
}
fn test_collision(relative_body:Body,expected_collision_time:Option<Time>){
test_collision_axis_aligned(relative_body,expected_collision_time);
test_collision_axis_aligned(relative_body.clone(),expected_collision_time);
test_collision_rotated(relative_body,expected_collision_time);
}
#[test]
@@ -2136,115 +2112,4 @@ mod test{
Time::ZERO
),None);
}
// overlap edges by 1 epsilon
#[test]
fn almost_miss_north(){
test_collision_axis_aligned(Body::new(
(int3(0,10,-7)>>1)+vec3::raw_xyz(0,0,1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
#[test]
fn almost_miss_east(){
test_collision_axis_aligned(Body::new(
(int3(7,10,0)>>1)+vec3::raw_xyz(-1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
#[test]
fn almost_miss_south(){
test_collision_axis_aligned(Body::new(
(int3(0,10,7)>>1)+vec3::raw_xyz(0,0,-1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
#[test]
fn almost_miss_west(){
test_collision_axis_aligned(Body::new(
(int3(-7,10,0)>>1)+vec3::raw_xyz(1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
// exactly miss edges
#[test]
fn exact_miss_north(){
test_collision_axis_aligned(Body::new(
int3(0,10,-7)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn exact_miss_east(){
test_collision_axis_aligned(Body::new(
int3(7,10,0)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn exact_miss_south(){
test_collision_axis_aligned(Body::new(
int3(0,10,7)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn exact_miss_west(){
test_collision_axis_aligned(Body::new(
int3(-7,10,0)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
// miss edges by 1 epsilon
#[test]
fn narrow_miss_north(){
test_collision_axis_aligned(Body::new(
(int3(0,10,-7)>>1)-vec3::raw_xyz(0,0,1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn narrow_miss_east(){
test_collision_axis_aligned(Body::new(
(int3(7,10,0)>>1)-vec3::raw_xyz(-1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn narrow_miss_south(){
test_collision_axis_aligned(Body::new(
(int3(0,10,7)>>1)-vec3::raw_xyz(0,0,-1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn narrow_miss_west(){
test_collision_axis_aligned(Body::new(
(int3(-7,10,0)>>1)-vec3::raw_xyz(1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
}

View File

@@ -192,7 +192,7 @@ fn get_push_ray_3(point:Planar64Vec3,c0:&Contact,c1:&Contact,c2:&Contact)->Optio
const fn get_best_push_ray_and_conts_0<'a>(point:Planar64Vec3)->(Ray,Conts<'a>){
(get_push_ray_0(point),Conts::new_const())
}
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts<'_>)>{
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts)>{
get_push_ray_1(point,c0)
.map(|ray|(ray,Conts::from_iter([c0])))
}

View File

@@ -10,6 +10,3 @@ strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../physics", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
strafesnet_snf = { path = "../../lib/snf", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -31,7 +31,7 @@ pub enum SessionInputInstruction{
Mouse(glam::IVec2),
SetControl(strafesnet_common::physics::SetControlInstruction),
Mode(ImplicitModeInstruction),
Misc(MiscInstruction),
Misc(strafesnet_common::physics::MiscInstruction),
}
/// Implicit mode instruction are fed separately to session.
/// Session generates the explicit mode instructions interlaced with a SetSensitivity instruction
@@ -152,10 +152,10 @@ enum ViewState{
pub struct Session{
directories:Directories,
user_settings:UserSettings,
mouse_interpolator:MouseInterpolator,
mouse_interpolator:crate::mouse_interpolator::MouseInterpolator,
view_state:ViewState,
//gui:GuiState
geometry_shared:PhysicsData,
geometry_shared:physics::PhysicsData,
simulation:Simulation,
// below fields not included in lite session
recording:Recording,
@@ -172,7 +172,7 @@ impl Session{
user_settings,
directories,
mouse_interpolator:MouseInterpolator::new(),
geometry_shared:PhysicsData::empty(),
geometry_shared:Default::default(),
simulation,
view_state:ViewState::Play,
recording:Default::default(),
@@ -184,7 +184,7 @@ impl Session{
}
fn change_map(&mut self,map:&strafesnet_common::map::CompleteMap){
self.simulation.physics.clear();
self.geometry_shared=PhysicsData::new(map);
self.geometry_shared.generate_models(map);
}
pub fn get_frame_state(&self,time:SessionTime)->Option<FrameState>{
match &self.view_state{

View File

@@ -8,6 +8,3 @@ configparser = "3.0.2"
directories = "6.0.0"
glam = "0.30.0"
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -1 +0,0 @@
/test_files

View File

@@ -4,12 +4,6 @@ version = "0.1.0"
edition = "2024"
[dependencies]
glam = "0.30.0"
strafesnet_common = { path = "../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../engine/physics", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet" }
# this is just for the primitive constructor
strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -1,28 +0,0 @@
#[expect(dead_code)]
#[derive(Debug)]
pub enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}

View File

@@ -1,15 +1,7 @@
mod error;
mod util;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_scenes;
use std::io::Cursor;
use std::path::Path;
use std::time::Instant;
use error::ReplayError;
use util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
fn main(){
@@ -21,6 +13,40 @@ fn main(){
}
}
#[allow(unused)]
#[derive(Debug)]
enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}
fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}
fn run_replay()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
@@ -31,8 +57,9 @@ fn run_replay()->Result<(),ReplayError>{
let bot=strafesnet_snf::read_bot(data)?.read_all()?;
// create recording
let mut physics_data=PhysicsData::default();
println!("generating models..");
let physics_data=PhysicsData::new(&map);
physics_data.generate_models(&map);
println!("simulating...");
let mut physics=PhysicsState::default();
for ins in bot.instructions{
@@ -140,8 +167,9 @@ fn test_determinism()->Result<(),ReplayError>{
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
let mut physics_data=PhysicsData::default();
println!("generating models..");
let physics_data=PhysicsData::new(&map);
physics_data.generate_models(&map);
let (send,recv)=std::sync::mpsc::channel();

View File

@@ -1,117 +0,0 @@
use strafesnet_physics::physics::{InternalInstruction,PhysicsData,PhysicsState,PhysicsContext};
use strafesnet_common::gameplay_modes::NormalizedModes;
use strafesnet_common::gameplay_attributes::{CollisionAttributes,CollisionAttributesId};
use strafesnet_common::integer::{vec3,mat3,Planar64Affine3,Time};
use strafesnet_common::model::{Mesh,Model,MeshId,ModelId,RenderConfigId};
use strafesnet_common::map::CompleteMap;
use strafesnet_rbx_loader::primitives::{unit_cube,CubeFaceDescription};
struct TestSceneBuilder{
meshes:Vec<Mesh>,
models:Vec<Model>,
}
impl TestSceneBuilder{
fn new()->Self{
Self{
meshes:Vec::new(),
models:Vec::new(),
}
}
fn push_mesh(&mut self,mesh:Mesh)->MeshId{
let mesh_id=self.meshes.len();
self.meshes.push(mesh);
MeshId::new(mesh_id as u32)
}
fn push_mesh_instance(&mut self,mesh:MeshId,transform:Planar64Affine3)->ModelId{
let model=Model{
mesh,
attributes:CollisionAttributesId::new(0),
color:glam::Vec4::ONE,
transform,
};
let model_id=self.models.len();
self.models.push(model);
ModelId::new(model_id as u32)
}
fn build(self)->PhysicsData{
let modes=NormalizedModes::new(Vec::new());
let attributes=vec![CollisionAttributes::contact_default()];
let meshes=self.meshes;
let models=self.models;
let textures=Vec::new();
let render_configs=Vec::new();
PhysicsData::new(&CompleteMap{
modes,
attributes,
meshes,
models,
textures,
render_configs,
})
}
}
fn test_scene()->PhysicsData{
let mut builder=TestSceneBuilder::new();
let cube_face_description=CubeFaceDescription::new(Default::default(),RenderConfigId::new(0));
let mesh=builder.push_mesh(unit_cube(cube_face_description));
// place two 5x5x5 cubes.
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(0,0,0)
));
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(5,-5,0)
));
builder.build()
}
#[test]
fn simultaneous_collision(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,1,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::ZERO;
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(2))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// the order that they hit does matter, but we aren't currently worrying about that.
// See multi-collision branch
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5,0,0));
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(1));
}
#[test]
fn bug_3(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,2,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::ZERO;
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(3))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// touch side of part at 0,0,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
// touch top of part at 5,-5,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(2));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5+2,0,0)>>1);
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(2));
}

View File

@@ -1,76 +0,0 @@
use crate::error::ReplayError;
use crate::util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
#[test]
#[ignore]
fn physics_bug_2()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("test_files/bhop_monster_jam.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
// body pos = Vector { array: [Fixed { bits: 554895163352 }, Fixed { bits: 1485633089990 }, Fixed { bits: 1279601007173 }] }
// after the fix it's still happening, possibly for a different reason, new position to evince:
// body pos = Vector { array: [Fixed { bits: 555690659654 }, Fixed { bits: 1490485868773 }, Fixed { bits: 1277783839382 }] }
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
vec3::raw_xyz(555690659654,1490485868773,1277783839382),
vec3::int(0,0,0),
vec3::int(0,-100,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
// hit=Some(ModelId(2262))
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}
#[test]
#[ignore]
fn physics_bug_3()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692152916.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
// bhop_toc corner position after wall hits
// vec3::raw_xyz(-1401734815424,3315081280280,-2466057177493),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// alternate room center position
// vec3::raw_xyz(-1129043783837,3324870327882,-2014012350212),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// corner setup before wall hits
vec3::raw_xyz(-1392580080675,3325402529458,-2444727738679),
vec3::raw_xyz(-30259028820,-22950929553,-71141663007),
vec3::raw_xyz(0,-429496729600,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}

View File

@@ -1,7 +0,0 @@
use std::io::Cursor;
use std::path::Path;
pub fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_bsp_loader"
version = "0.3.1"
version = "0.3.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -11,12 +11,9 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
glam = "0.30.0"
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
vbsp = "0.9.1"
vbsp-entities-css = "0.6.0"
vmdl = "0.2.0"
vpk = "0.3.0"
[lints]
workspace = true

View File

@@ -7,7 +7,7 @@ use crate::{valve_transform_normal,valve_transform_dist};
#[derive(Hash,Eq,PartialEq)]
struct Face{
normal:integer::Planar64Vec3,
dot:Planar64,
dot:integer::Planar64,
}
#[derive(Debug)]
@@ -187,7 +187,7 @@ fn planes_to_faces(face_list:std::collections::HashSet<Face>)->Result<Faces,Plan
}
}
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum BrushToMeshError{
SliceBrushSides,

View File

@@ -347,7 +347,7 @@ pub struct PartialMap1{
modes:NormalizedModes,
}
impl PartialMap1{
pub fn add_prop_meshes(
pub fn add_prop_meshes<'a>(
self,
prop_meshes:Meshes<model::Mesh>,
)->PartialMap2{

View File

@@ -5,6 +5,7 @@ use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
use crate::{Bsp,Vpk};
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -31,7 +32,7 @@ impl Loader for TextureLoader{
type Error=TextureError;
type Index<'a>=Cow<'a,str>;
type Resource=Texture;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
let file_name=format!("textures/{}.dds",index);
let mut file=std::fs::File::open(file_name)?;
let mut data=Vec::new();
@@ -40,6 +41,7 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -111,7 +113,7 @@ impl ModelLoader<'_,'_>{
}
}
}
impl Loader for ModelLoader<'_,'_>{
impl<'bsp,'vpk> Loader for ModelLoader<'bsp,'vpk>{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Resource=vmdl::Model;
@@ -151,7 +153,7 @@ impl MeshLoader<'_,'_,'_,'_>{
}
}
}
impl Loader for MeshLoader<'_,'_,'_,'_>{
impl<'str,'bsp,'vpk,'load> Loader for MeshLoader<'bsp,'vpk,'load,'str>{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Resource=Mesh;

View File

@@ -61,7 +61,7 @@ pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredL
_=>None,
}
})
}).filter_map(|[v1,v2,v3]|{
}).flat_map(|[v1,v2,v3]|{
// this should probably be a fatal error :D
let v1=model_vertices.get(v1)?;
let v2=model_vertices.get(v2)?;

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_common"
version = "0.7.0"
version = "0.6.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -17,6 +17,3 @@ linear_ops = { version = "0.1.1", path = "../linear_ops", registry = "strafesnet
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet" }
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -2,9 +2,7 @@ use crate::integer::{vec3,Planar64Vec3};
#[derive(Clone)]
pub struct Aabb{
// min is inclusive
min:Planar64Vec3,
// max is not inclusive
max:Planar64Vec3,
}
@@ -45,7 +43,7 @@ impl Aabb{
}
#[inline]
pub fn contains(&self,point:Planar64Vec3)->bool{
let bvec=self.min.le(point)&point.lt(self.max);
let bvec=self.min.lt(point)&point.lt(self.max);
bvec.all()
}
#[inline]
@@ -61,11 +59,11 @@ impl Aabb{
pub fn center(&self)->Planar64Vec3{
self.min.map_zip(self.max,|(min,max)|min.midpoint(max))
}
#[inline]
pub fn area_weight(&self)->fixed_wide::fixed::Fixed<2,64>{
let d=self.max-self.min;
d.x*d.y+d.y*d.z+d.z*d.x
}
//probably use floats for area & volume because we don't care about precision
// pub fn area_weight(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y+d.y*d.z+d.z*d.x
// }
// pub fn volume(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y*d.z

View File

@@ -245,19 +245,18 @@ pub fn generate_bvh<T>(boxen:Vec<(T,Aabb)>)->BvhNode<T>{
fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
let n=boxen.len();
const MAX_TERMINAL_BRANCH_LEAF_NODES:usize=20;
if force||n<MAX_TERMINAL_BRANCH_LEAF_NODES{
let mut aabb_outer=Aabb::default();
let nodes=boxen.into_iter().map(|(data,aabb)|{
aabb_outer.join(&aabb);
if force||n<20{
let mut aabb=Aabb::default();
let nodes=boxen.into_iter().map(|b|{
aabb.join(&b.1);
BvhNode{
content:RecursiveContent::Leaf(data),
aabb,
content:RecursiveContent::Leaf(b.0),
aabb:b.1,
}
}).collect();
BvhNode{
content:RecursiveContent::Branch(nodes),
aabb:aabb_outer,
aabb,
}
}else{
let mut sort_x=Vec::with_capacity(n);
@@ -273,9 +272,9 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
sort_y.sort_by_key(|&(_,c)|c);
sort_z.sort_by_key(|&(_,c)|c);
let h=n/2;
let (_,median_x)=sort_x[h];
let (_,median_y)=sort_y[h];
let (_,median_z)=sort_z[h];
let median_x=sort_x[h].1;
let median_y=sort_y[h].1;
let median_z=sort_z[h].1;
//locate a run of values equal to the median
//partition point gives the first index for which the predicate evaluates to false
let first_index_eq_median_x=sort_x.partition_point(|&(_,x)|x<median_x);
@@ -314,10 +313,10 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
};
list_list[list_id].push((data,aabb));
}
let mut aabb=Aabb::default();
if list_list.len()==1{
generate_bvh_node(list_list.remove(0),true)
}else{
let mut aabb=Aabb::default();
BvhNode{
content:RecursiveContent::Branch(
list_list.into_iter().map(|b|{

View File

@@ -140,15 +140,6 @@ impl ModeId{
pub const MAIN:Self=Self(0);
pub const BONUS:Self=Self(1);
}
impl core::fmt::Display for ModeId{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->core::fmt::Result{
match self{
&Self::MAIN=>write!(f,"Main"),
&Self::BONUS=>write!(f,"Bonus"),
&Self(mode_id)=>write!(f,"Bonus{mode_id}"),
}
}
}
#[derive(Clone)]
pub struct Mode{
style:gameplay_style::StyleModifiers,

View File

@@ -34,7 +34,7 @@ pub struct StyleModifiers{
//unused
pub mass:Planar64,
}
impl Default for StyleModifiers{
impl std::default::Default for StyleModifiers{
fn default()->Self{
Self::roblox_bhop()
}

View File

@@ -34,41 +34,12 @@ pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsu
self.process_instruction(instruction);
}
}
#[inline]
fn into_iter(self,time_limit:T)->InstructionIter<I,T,Self>
where
Self:Sized
{
InstructionIter{
time_limit,
feedback:self,
_phantom:core::marker::PhantomData,
}
}
}
impl<I,T,F> InstructionFeedback<I,T> for F
impl<I,T,X> InstructionFeedback<I,T> for X
where
T:Copy,
F:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
{}
pub struct InstructionIter<I,T:Copy,F:InstructionFeedback<I,T>>{
time_limit:T,
feedback:F,
_phantom:core::marker::PhantomData<I>,
}
impl<I,T,F> Iterator for InstructionIter<I,T,F>
where
I:Clone,
T:Clone+Copy,
F:InstructionFeedback<I,T>,
{
type Item=TimedInstruction<I,T>;
fn next(&mut self)->Option<Self::Item>{
let instruction=self.feedback.next_instruction(self.time_limit)?;
self.feedback.process_instruction(instruction.clone());
Some(instruction)
}
}
//PROPER PRIVATE FIELDS!!!
pub struct InstructionCollector<I,T>{

View File

@@ -86,7 +86,7 @@ impl<T> std::fmt::Display for Time<T>{
write!(f,"{}s+{:09}ns",self.0/Self::ONE_SECOND.0,self.0%Self::ONE_SECOND.0)
}
}
impl<T> Default for Time<T>{
impl<T> std::default::Default for Time<T>{
fn default()->Self{
Self::raw(0)
}
@@ -126,7 +126,7 @@ impl_time_additive_assign_operator!(core::ops::AddAssign,add_assign);
impl_time_additive_assign_operator!(core::ops::SubAssign,sub_assign);
impl_time_additive_assign_operator!(core::ops::RemAssign,rem_assign);
impl<T> std::ops::Mul for Time<T>{
type Output=Ratio<Fixed<2,64>,Fixed<2,64>>;
type Output=Ratio<fixed_wide::fixed::Fixed<2,64>,fixed_wide::fixed::Fixed<2,64>>;
#[inline]
fn mul(self,rhs:Self)->Self::Output{
Ratio::new(Fixed::raw(self.0)*Fixed::raw(rhs.0),Fixed::raw_digit(1_000_000_000i64.pow(2)))
@@ -156,7 +156,7 @@ impl<T> core::ops::Mul<Time<T>> for Planar64{
#[cfg(test)]
mod test_time{
use super::*;
type Time=AbsoluteTime;
type Time=super::AbsoluteTime;
#[test]
fn time_from_planar64(){
let a:Time=Planar64::from(1).into();
@@ -552,7 +552,7 @@ impl TryFrom<[f32;3]> for Unit32Vec3{
}
*/
pub type Planar64TryFromFloatError=FixedFromFloatError;
pub type Planar64TryFromFloatError=fixed_wide::fixed::FixedFromFloatError;
pub type Planar64=fixed_wide::types::I32F32;
pub type Planar64Vec3=linear_ops::types::Vector3<Planar64>;
pub type Planar64Mat3=linear_ops::types::Matrix3<Planar64>;
@@ -562,11 +562,11 @@ pub mod vec3{
pub const MIN:Planar64Vec3=Planar64Vec3::new([Planar64::MIN;3]);
pub const MAX:Planar64Vec3=Planar64Vec3::new([Planar64::MAX;3]);
pub const ZERO:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO;3]);
pub const ZERO_2:Vector3<Fixed::<2,64>>=Vector3::new([Fixed::<2,64>::ZERO;3]);
pub const ZERO_3:Vector3<Fixed::<3,96>>=Vector3::new([Fixed::<3,96>::ZERO;3]);
pub const ZERO_4:Vector3<Fixed::<4,128>>=Vector3::new([Fixed::<4,128>::ZERO;3]);
pub const ZERO_5:Vector3<Fixed::<5,160>>=Vector3::new([Fixed::<5,160>::ZERO;3]);
pub const ZERO_6:Vector3<Fixed::<6,192>>=Vector3::new([Fixed::<6,192>::ZERO;3]);
pub const ZERO_2:linear_ops::types::Vector3<Fixed::<2,64>>=linear_ops::types::Vector3::new([Fixed::<2,64>::ZERO;3]);
pub const ZERO_3:linear_ops::types::Vector3<Fixed::<3,96>>=linear_ops::types::Vector3::new([Fixed::<3,96>::ZERO;3]);
pub const ZERO_4:linear_ops::types::Vector3<Fixed::<4,128>>=linear_ops::types::Vector3::new([Fixed::<4,128>::ZERO;3]);
pub const ZERO_5:linear_ops::types::Vector3<Fixed::<5,160>>=linear_ops::types::Vector3::new([Fixed::<5,160>::ZERO;3]);
pub const ZERO_6:linear_ops::types::Vector3<Fixed::<6,192>>=linear_ops::types::Vector3::new([Fixed::<6,192>::ZERO;3]);
pub const X:Planar64Vec3=Planar64Vec3::new([Planar64::ONE,Planar64::ZERO,Planar64::ZERO]);
pub const Y:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ONE,Planar64::ZERO]);
pub const Z:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ZERO,Planar64::ONE]);

View File

@@ -1,6 +1,6 @@
use std::collections::HashMap;
use crate::integer::{Planar64,Planar64Vec3,Planar64Affine3};
use crate::integer::{Planar64Vec3,Planar64Affine3};
use crate::gameplay_attributes;
pub type TextureCoordinate=glam::Vec2;
@@ -168,11 +168,6 @@ impl MeshBuilder{
}
}
pub fn acquire_pos_id(&mut self,pos:Planar64Vec3)->PositionId{
// Truncate the 16 most precise bits of the vertex positions.
// This allows the normal vectors to exactly represent the face.
// Remove this in Mesh V2
const MASK:Planar64=Planar64::raw(!((1<<16)-1));
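// The raw mask has its low 16 bits cleared, so `c&MASK` zeroes the 16 least significant fractional bits of each coordinate.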
let pos=pos.map(|c|c&MASK);
*self.pos_id_from.entry(pos).or_insert_with(||{
let pos_id=PositionId::new(self.unique_pos.len() as u32);
self.unique_pos.push(pos);

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_deferred_loader"
version = "0.5.1"
version = "0.5.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -10,7 +10,4 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }

View File

@@ -4,5 +4,5 @@ pub trait Loader{
type Error:Error;
type Index<'a> where Self:'a;
type Resource;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>;
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>;
}

View File

@@ -1,6 +1,6 @@
[package]
name = "fixed_wide"
version = "0.2.1"
version = "0.2.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -18,6 +18,3 @@ bnum = "0.13.0"
arrayvec = { version = "0.7.6", optional = true }
paste = "1.0.15"
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
[lints]
workspace = true

View File

@@ -1,8 +1,6 @@
use bnum::{BInt,cast::As};
const BNUM_DIGIT_WIDTH:usize=64;
#[derive(Clone,Copy,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
/// A Fixed point number for which multiply operations widen the bits in the output. (when the wide-mul feature is enabled)
/// N is the number of u64s to use
/// F is the number of fractional bits (always N*32 lol)
@@ -235,11 +233,6 @@ impl FixedFromFloatError{
}
}
}
impl core::fmt::Display for FixedFromFloatError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
macro_rules! impl_from_float {
( $decode:ident, $input: ty, $mantissa_bits:expr ) => {
impl<const N:usize,const F:usize> TryFrom<$input> for Fixed<N,F>{
@@ -288,23 +281,6 @@ macro_rules! impl_from_float {
impl_from_float!(integer_decode_f32,f32,24);
impl_from_float!(integer_decode_f64,f64,53);
impl<const N:usize,const F:usize> core::fmt::Debug for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{
let integral=self.as_bits().unsigned_abs()>>F;
let fractional=self.as_bits().unsigned_abs()&((bnum::BUint::<N>::ONE<<F)-bnum::BUint::<N>::ONE);
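// Number of leading zero hex digits (nibbles) within the F fractional bits, so they are not dropped when the fraction is printed in hex.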
let leading_zeroes=(fractional.leading_zeros() as usize).saturating_sub(N*BNUM_DIGIT_WIDTH-F)>>2;
if self.is_negative(){
core::write!(f,"-")?;
}
if fractional.is_zero(){
core::write!(f,"{integral:x}.{}","0".repeat(leading_zeroes))
}else{
core::write!(f,"{integral:x}.{}{fractional:x}","0".repeat(leading_zeroes))
}
}
}
impl<const N:usize,const F:usize> core::fmt::Display for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{

View File

@@ -229,16 +229,3 @@ fn test_zeroes_deferred_division(){
])
);
}
#[test]
fn test_debug(){
assert_eq!(format!("{:?}",I32F32::EPSILON),"0.00000001");
assert_eq!(format!("{:?}",I32F32::ONE),"1.00000000");
assert_eq!(format!("{:?}",I32F32::TWO),"2.00000000");
assert_eq!(format!("{:?}",I32F32::MAX),"7fffffff.ffffffff");
assert_eq!(format!("{:?}",I32F32::try_from(core::f64::consts::PI).unwrap()),"3.243f6a88");
assert_eq!(format!("{:?}",I32F32::NEG_EPSILON),"-0.00000001");
assert_eq!(format!("{:?}",I32F32::NEG_ONE),"-1.00000000");
assert_eq!(format!("{:?}",I32F32::NEG_TWO),"-2.00000000");
assert_eq!(format!("{:?}",I32F32::MIN),"-80000000.00000000");
}

View File

@@ -20,6 +20,3 @@ paste = { version = "1.0.15", optional = true }
[dev-dependencies]
fixed_wide = { path = "../fixed_wide", registry = "strafesnet", features = ["wide-mul"] }
[lints]
workspace = true

View File

@@ -205,8 +205,7 @@ macro_rules! impl_matrix_named_fields_shape {
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr:*const [[T;$size_inner];$size_outer]=&self.array;
let ptr=ptr as *const Self::Target;
let ptr=&self.array as *const [[T;$size_inner];$size_outer] as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
}
@@ -215,8 +214,7 @@ macro_rules! impl_matrix_named_fields_shape {
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr:*mut [[T;$size_inner];$size_outer]=&mut self.array;
let ptr=ptr as *mut Self::Target;
let ptr=&mut self.array as *mut [[T;$size_inner];$size_outer] as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
}

View File

@@ -331,8 +331,7 @@ macro_rules! impl_vector_named_fields {
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr:*const [T;$size]=&self.array;
let ptr=ptr as *const Self::Target;
let ptr=&self.array as *const [T;$size] as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
}
@@ -341,8 +340,7 @@ macro_rules! impl_vector_named_fields {
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr:*mut [T;$size]=&mut self.array;
let ptr=ptr as *mut Self::Target;
let ptr=&mut self.array as *mut [T;$size] as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
}

View File

@@ -8,6 +8,3 @@ description = "Ratio operations using trait bounds for avoiding division like th
authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_rbx_loader"
version = "0.7.0"
version = "0.6.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -12,17 +12,13 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
bytemuck = "1.14.3"
glam = "0.30.0"
regex = { version = "1.11.3", default-features = false }
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_mesh = "0.5.0"
rbx_reflection = "5.0.0"
lazy-regex = "3.1.0"
rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
rbx_mesh = "0.3.1"
rbx_reflection_database = "1.0.0"
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
roblox_emulator = { version = "0.5.1", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
[lints]
workspace = true
roblox_emulator = { version = "0.5.0", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }

View File

@@ -1,245 +0,0 @@
use std::collections::HashSet;
use std::num::ParseIntError;
use strafesnet_common::gameplay_modes::{StageId,ModeId};
use strafesnet_common::integer::{FixedFromFloatError,Planar64TryFromFloatError};
/// A collection of errors which can be ignored at your peril
#[derive(Debug,Default)]
pub struct RecoverableErrors{
/// A basepart has an invalid / missing property.
pub basepart_property:Vec<InstancePath>,
/// A basepart has an unconvertible CFrame.
pub basepart_cframe:Vec<CFrameError>,
/// A basepart has an unconvertible Velocity.
pub basepart_velocity:Vec<Planar64ConvertError>,
/// A part has an invalid / missing property.
pub part_property:Vec<InstancePath>,
/// A part has an invalid shape.
pub part_shape:Vec<ShapeError>,
/// A meshpart has an invalid / missing property.
pub meshpart_property:Vec<InstancePath>,
/// A meshpart has no mesh.
pub meshpart_content:Vec<InstancePath>,
/// A basepart has an unsupported subclass.
pub unsupported_class:HashSet<String>,
/// A decal has an invalid / missing property.
pub decal_property:Vec<InstancePath>,
/// A decal has an invalid normal_id.
pub normal_id:Vec<NormalIdError>,
/// A texture has an invalid / missing property.
pub texture_property:Vec<InstancePath>,
/// A mode_id failed to parse.
pub mode_id_parse_int:Vec<ParseIntContext>,
/// There is a duplicate mode.
pub duplicate_mode:HashSet<ModeId>,
/// A stage_id failed to parse.
pub stage_id_parse_int:Vec<ParseIntContext>,
/// A Stage was duplicated leading to undefined behaviour.
pub duplicate_stage:HashSet<DuplicateStageError>,
/// A WormholeOut id failed to parse.
pub wormhole_out_id_parse_int:Vec<ParseIntContext>,
/// A WormholeOut was duplicated leading to undefined behaviour.
pub duplicate_wormhole_out:HashSet<u32>,
/// A WormholeIn id failed to parse.
pub wormhole_in_id_parse_int:Vec<ParseIntContext>,
/// A jump limit failed to parse.
pub jump_limit_parse_int:Vec<ParseIntContext>,
}
impl RecoverableErrors{
pub fn count(&self)->usize{
self.basepart_property.len()+
self.basepart_cframe.len()+
self.basepart_velocity.len()+
self.part_property.len()+
self.part_shape.len()+
self.meshpart_property.len()+
self.meshpart_content.len()+
self.unsupported_class.len()+
self.decal_property.len()+
self.normal_id.len()+
self.texture_property.len()+
self.mode_id_parse_int.len()+
self.duplicate_mode.len()+
self.stage_id_parse_int.len()+
self.duplicate_stage.len()+
self.wormhole_out_id_parse_int.len()+
self.duplicate_wormhole_out.len()+
self.wormhole_in_id_parse_int.len()+
self.jump_limit_parse_int.len()
}
}
fn write_comma_separated<T>(
f:&mut std::fmt::Formatter<'_>,
mut it:impl Iterator<Item=T>,
custom_write:impl Fn(&mut std::fmt::Formatter<'_>,T)->std::fmt::Result
)->std::fmt::Result{
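// Write the first item bare, then prefix every remaining item with ", ".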
if let Some(t)=it.next(){
custom_write(f,t)?;
for t in it{
write!(f,", ")?;
custom_write(f,t)?;
}
}
Ok(())
}
macro_rules! write_instance_path_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,InstancePath(path)|
write!(f,"{path}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_duplicate_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} duplicates: ")?;
write_comma_separated($f,$self.$field.iter(),|f,id|
write!(f,"{id}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_bespoke_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal,$path_field:ident,$error_field:ident)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,context|
write!(f,"{} ({})",context.$path_field,context.$error_field)
)?;
writeln!($f)?;
}
};
}
impl core::fmt::Display for RecoverableErrors{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write_instance_path_error!(f,self,basepart_property,"BasePart is","BaseParts are","missing a property");
write_bespoke_error!(f,self,basepart_cframe,"BasePart","BaseParts","CFrame float convert failed",path,error);
write_bespoke_error!(f,self,basepart_velocity,"BasePart","BaseParts","Velocity float convert failed",path,error);
write_instance_path_error!(f,self,part_property,"Part is","Parts are","missing a property");
write_bespoke_error!(f,self,part_shape,"Part","Parts","Shape is invalid",path,shape);
write_instance_path_error!(f,self,meshpart_property,"MeshPart is","MeshParts are","missing a property");
write_instance_path_error!(f,self,meshpart_content,"MeshPart has","MeshParts have","no mesh");
{
let len=self.unsupported_class.len();
if len!=0{
let plural=if len==1{"Class is"}else{"Classes are"};
write!(f,"The following {plural} not supported: ")?;
write_comma_separated(f,self.unsupported_class.iter(),|f,classname|write!(f,"{classname}"))?;
writeln!(f)?;
}
}
write_instance_path_error!(f,self,decal_property,"Decal is","Decals are","missing a property");
write_bespoke_error!(f,self,normal_id,"Decal","Decals","NormalId is invalid",path,normal_id);
write_instance_path_error!(f,self,texture_property,"Texture is","Textures are","missing a property");
write_bespoke_error!(f,self,mode_id_parse_int,"ModeId","ModeIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_mode,"ModeId has","ModeIds have");
write_bespoke_error!(f,self,stage_id_parse_int,"StageId","StageIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_stage,"StageId has","StageIds have");
write_bespoke_error!(f,self,wormhole_out_id_parse_int,"WormholeOutId","WormholeOutIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_wormhole_out,"WormholeOutId has","WormholeOutIds have");
write_bespoke_error!(f,self,wormhole_in_id_parse_int,"WormholeInId","WormholeInIds","failed to parse",context,error);
write_bespoke_error!(f,self,jump_limit_parse_int,"jump limit","jump limits","failed to parse",context,error);
Ok(())
}
}
/// The dot-separated path of an Instance within the DOM, used to locate the offending object in error messages.
#[derive(Debug)]
pub struct InstancePath(pub String);
impl core::fmt::Display for InstancePath{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
self.0.fmt(f)
}
}
impl InstancePath{
pub fn new(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->InstancePath{
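// Walk the parent chain from this instance up to the root, collecting names leaf-first.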
let mut names:Vec<_>=core::iter::successors(
Some(instance),
|i|dom.get_by_ref(i.parent())
).map(
|i|i.name.as_str()
).collect();
// discard the name of the root object
names.pop();
names.reverse();
InstancePath(names.join("."))
}
}
#[derive(Debug)]
pub struct ParseIntContext{
pub context:String,
pub error:ParseIntError,
}
impl ParseIntContext{
pub fn parse<T:core::str::FromStr<Err=ParseIntError>>(input:&str)->Result<T,Self>{
input.parse().map_err(|error|ParseIntContext{
context:input.to_owned(),
error,
})
}
}
#[derive(Debug)]
pub struct NormalIdError{
pub path:InstancePath,
pub normal_id:u32,
}
#[derive(Debug)]
pub struct ShapeError{
pub path:InstancePath,
pub shape:u32,
}
#[derive(Debug)]
pub enum CFrameErrorType{
ZeroDeterminant,
Convert(FixedFromFloatError),
}
impl core::fmt::Display for CFrameErrorType{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
#[derive(Debug)]
pub struct CFrameError{
pub path:InstancePath,
pub error:CFrameErrorType,
}
#[derive(Debug)]
pub struct Planar64ConvertError{
pub path:InstancePath,
pub error:Planar64TryFromFloatError,
}
#[derive(Debug,Hash,Eq,PartialEq)]
pub struct DuplicateStageError{
pub mode_id:ModeId,
pub stage_id:StageId,
}
impl core::fmt::Display for DuplicateStageError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{}-Spawn{}",self.mode_id,self.stage_id.get())
}
}

View File

@@ -1,18 +1,13 @@
use std::io::Read;
use rbx_dom_weak::WeakDom;
use roblox_emulator::context::Context;
use strafesnet_common::map::CompleteMap;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
pub use error::RecoverableErrors;
pub use roblox_emulator::runner::Error as RunnerError;
mod rbx;
mod mesh;
mod error;
mod union;
pub mod loader;
pub mod primitives;
mod primitives;
pub mod data{
pub struct RobloxMeshBytes(Vec<u8>);
@@ -33,7 +28,7 @@ impl Model{
fn new(dom:WeakDom)->Self{
Self{dom}
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
to_snf(self,failure_mode)
}
}
@@ -53,20 +48,18 @@ impl Place{
context,
})
}
pub fn run_scripts(&mut self)->Result<Vec<RunnerError>,RunnerError>{
pub fn run_scripts(&mut self){
let Place{context}=self;
let runner=roblox_emulator::runner::Runner::new()?;
let runner=roblox_emulator::runner::Runner::new().unwrap();
let scripts=context.scripts();
let runnable=runner.runnable_context(context)?;
let mut errors=Vec::new();
let runnable=runner.runnable_context(context).unwrap();
for script in scripts{
if let Err(e)=runnable.run_script(script){
errors.push(e);
println!("runner error: {e}");
}
}
Ok(errors)
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
to_snf(self,failure_mode)
}
}
@@ -130,7 +123,7 @@ impl From<loader::MeshError> for LoadError{
}
}
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
let dom=dom.as_ref();
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
@@ -150,5 +143,7 @@ fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<(Complet
let mut texture_loader=loader::TextureLoader::new();
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;
Ok(map_step2.add_render_configs_and_textures(render_configs))
let map=map_step2.add_render_configs_and_textures(render_configs);
Ok(map)
}

View File

@@ -18,6 +18,7 @@ fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::E
Ok(data)
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -50,7 +51,7 @@ impl Loader for TextureLoader{
type Error=TextureError;
type Index<'a>=&'a str;
type Resource=Texture;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
let RobloxAssetId(asset_id)=index.parse()?;
let file_name=format!("textures/{}.dds",asset_id);
let data=read_entire_file(file_name)?;
@@ -58,6 +59,7 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -116,7 +118,7 @@ pub struct MeshIndex<'a>{
content:&'a str,
}
impl MeshIndex<'_>{
pub fn file_mesh(content:&str)->MeshIndex<'_>{
pub fn file_mesh(content:&str)->MeshIndex{
MeshIndex{
mesh_type:MeshType::FileMesh,
content,
@@ -157,7 +159,7 @@ impl Loader for MeshLoader{
type Error=MeshError;
type Index<'a>=MeshIndex<'a>;
type Resource=MeshWithSize;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
let mesh=match index.mesh_type{
MeshType::FileMesh=>{
let RobloxAssetId(asset_id)=index.content.parse()?;

View File

@@ -3,12 +3,14 @@ use std::collections::HashMap;
use rbx_mesh::mesh::{Vertex2,Vertex2Truncated};
use strafesnet_common::aabb::Aabb;
use strafesnet_common::integer::vec3;
use strafesnet_common::model::{self,ColorId,IndexedVertex,PolygonGroup,PolygonList,RenderConfigId,VertexId};
use strafesnet_common::model::{self,ColorId,IndexedVertex,NormalId,PolygonGroup,PolygonList,PositionId,RenderConfigId,TextureCoordinateId,VertexId};
use crate::loader::MeshWithSize;
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
RbxMesh(rbx_mesh::mesh::Error)
}
impl std::fmt::Display for Error{
@@ -18,46 +20,69 @@ impl std::fmt::Display for Error{
}
impl std::error::Error for Error{}
fn ingest_vertices2(
fn ingest_vertices2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireColorId,
AcquireVertexId,
>(
vertices:Vec<Vertex2>,
mb:&mut model::MeshBuilder,
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
acquire_color_id:&mut AcquireColorId,
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireColorId:FnMut([f32;4])->ColorId,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
//this monster collects a map of old_vertices_index -> unique_vertices_index
//while simultaneously inserting the unique entries into their respective lists
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32))),
};
mb.acquire_vertex_id(vertex)
}
))).collect()
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:acquire_color_id(vertex.color.map(|f|f as f32/255.0f32))
}),
))).collect::<Result<_,_>>()?)
}
fn ingest_vertices_truncated2(
fn ingest_vertices_truncated2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireVertexId,
>(
vertices:Vec<Vertex2Truncated>,
mb:&mut model::MeshBuilder,
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
static_color_id:ColorId,//pick one color and fill everything with it
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
//this monster collects a map of old_vertices_index -> unique_vertices_index
//while simultaneously inserting the unique entries into their respective lists
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:static_color_id,
};
mb.acquire_vertex_id(vertex)
}
))).collect()
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:static_color_id
}),
))).collect::<Result<_,_>>()?)
}
fn ingest_faces2_lods3(
@@ -68,80 +93,132 @@ fn ingest_faces2_lods3(
){
//faces have to be split into polygon groups based on lod
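// `lods` holds face-index offsets; each adjacent pair of offsets delimits the face range of one level of detail.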
polygon_groups.extend(lods.windows(2).map(|lod_pair|
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().filter_map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
Some(vec![*vertex_id_map.get(&v0)?,*vertex_id_map.get(&v1)?,*vertex_id_map.get(&v2)?])
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
vec![vertex_id_map[&v0],vertex_id_map[&v1],vertex_id_map[&v2]]
).collect()))
))
}
pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithSize,Error>{
//generate that mesh boi
let mut unique_pos=Vec::new();
let mut pos_id_from=HashMap::new();
let mut unique_tex=Vec::new();
let mut tex_id_from=HashMap::new();
let mut unique_normal=Vec::new();
let mut normal_id_from=HashMap::new();
let mut unique_color=Vec::new();
let mut color_id_from=HashMap::new();
let mut unique_vertices=Vec::new();
let mut vertex_id_from=HashMap::new();
let mut polygon_groups=Vec::new();
let mut mb=model::MeshBuilder::new();
let mut aabb=Aabb::default();
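// The acquire_* closures below deduplicate positions, texture coordinates, normals, colors and vertices; float data is bit-cast to integers so it can serve as a HashMap key.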
let mut acquire_pos_id=|pos|{
let p=vec3::try_from_f32_array(pos).map_err(Error::Planar64Vec3)?;
aabb.grow(p);
Ok(PositionId::new(*pos_id_from.entry(p).or_insert_with(||{
let pos_id=unique_pos.len();
unique_pos.push(p);
pos_id
}) as u32))
};
let mut acquire_tex_id=|tex|{
let h=bytemuck::cast::<[f32;2],[u32;2]>(tex);
TextureCoordinateId::new(*tex_id_from.entry(h).or_insert_with(||{
let tex_id=unique_tex.len();
unique_tex.push(glam::Vec2::from_array(tex));
tex_id
}) as u32)
};
let mut acquire_normal_id=|normal|{
let n=vec3::try_from_f32_array(normal).map_err(Error::Planar64Vec3)?;
Ok(NormalId::new(*normal_id_from.entry(n).or_insert_with(||{
let normal_id=unique_normal.len();
unique_normal.push(n);
normal_id
}) as u32))
};
let mut acquire_color_id=|color|{
let h=bytemuck::cast::<[f32;4],[u32;4]>(color);
ColorId::new(*color_id_from.entry(h).or_insert_with(||{
let color_id=unique_color.len();
unique_color.push(glam::Vec4::from_array(color));
color_id
}) as u32)
};
let mut acquire_vertex_id=|vertex:IndexedVertex|{
VertexId::new(*vertex_id_from.entry(vertex.clone()).or_insert_with(||{
let vertex_id=unique_vertices.len();
unique_vertices.push(vertex);
vertex_id
}) as u32)
};
match rbx_mesh::read_versioned(roblox_mesh_bytes.cursor()).map_err(Error::RbxMesh)?{
rbx_mesh::mesh::Mesh::V1(mesh)=>{
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).filter_map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::vec2(vertex.tex[0],vertex.tex[1])),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:color_id,
};
Some(mb.acquire_vertex_id(vertex))
};
Some(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect())));
rbx_mesh::mesh::VersionedMesh::Version1(mesh)=>{
let color_id=acquire_color_id([1.0f32;4]);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|Ok(acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id([vertex.tex[0],vertex.tex[1]]),
normal:acquire_normal_id(vertex.norm)?,
color:color_id,
}));
Ok(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect::<Result<_,_>>()?)));
},
rbx_mesh::mesh::Mesh::V2(mesh)=>{
rbx_mesh::mesh::VersionedMesh::Version2(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
//pick white and make all the vertices white
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
//one big happy group for all the faces
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().filter_map(|face|
Some(vec![*vertex_id_map.get(&face.0)?,*vertex_id_map.get(&face.1)?,*vertex_id_map.get(&face.2)?])
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|face|
vec![vertex_id_map[&face.0],vertex_id_map[&face.1],vertex_id_map[&face.2]]
).collect())));
},
rbx_mesh::mesh::Mesh::V3(mesh)=>{
rbx_mesh::mesh::VersionedMesh::Version3(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::Mesh::V4(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
rbx_mesh::mesh::VersionedMesh::Version4(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::Mesh::V5(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
rbx_mesh::mesh::VersionedMesh::Version5(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
}
let mesh=mb.build(
let mesh=model::Mesh{
unique_pos,
unique_normal,
unique_tex,
unique_color,
unique_vertices,
polygon_groups,
//these should probably be moved to the model...
//but what if models want to use the same texture
vec![model::IndexedGraphicsGroup{
graphics_groups:vec![model::IndexedGraphicsGroup{
render:RenderConfigId::new(0),
//the lowest lod is highest quality
groups:vec![model::PolygonGroupId::new(0)]
}],
//disable physics
Vec::new(),
);
let mut aabb=Aabb::default();
for &point in &mesh.unique_pos{
aabb.grow(point);
}
physics_groups:Vec::new(),
};
Ok(MeshWithSize{mesh,size:aabb.size()})
}

View File

@@ -1,5 +1,4 @@
use std::collections::HashMap;
use crate::error::{RecoverableErrors,CFrameError,CFrameErrorType,DuplicateStageError,InstancePath,NormalIdError,Planar64ConvertError,ParseIntContext,ShapeError};
use crate::loader::{MeshWithSize,MeshIndex};
use crate::primitives::{self,CubeFace,CubeFaceDescription,WedgeFaceDescription,CornerWedgeFaceDescription,FaceDescription,Primitives};
use strafesnet_common::map;
@@ -7,7 +6,7 @@ use strafesnet_common::model;
use strafesnet_common::gameplay_modes::{NormalizedModes,Mode,ModeId,ModeUpdate,ModesBuilder,Stage,StageElement,StageElementBehaviour,StageId,Zone};
use strafesnet_common::gameplay_style;
use strafesnet_common::gameplay_attributes as attr;
use strafesnet_common::integer::{self,vec3,Planar64TryFromFloatError,Planar64,Planar64Vec3,Planar64Mat3,Planar64Affine3};
use strafesnet_common::integer::{self,vec3,Planar64,Planar64Vec3,Planar64Mat3,Planar64Affine3};
use strafesnet_common::model::RenderConfigId;
use strafesnet_deferred_loader::deferred_loader::{RenderConfigDeferredLoader,MeshDeferredLoader};
use strafesnet_deferred_loader::mesh::Meshes;
@@ -18,41 +17,40 @@ fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
}
macro_rules! lazy_regex{
($r:literal)=>{{
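// Compile the pattern once into a static LazyLock and return a reference to it.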
use regex::Regex;
use std::sync::LazyLock;
static RE:LazyLock<Regex>=LazyLock::new(||Regex::new($r).unwrap());
&RE
}};
fn recursive_collect_superclass(
objects:&mut std::vec::Vec<rbx_dom_weak::types::Ref>,
dom:&rbx_dom_weak::WeakDom,
instance:&rbx_dom_weak::Instance,
superclass:&str
){
let instance=instance;
let db=rbx_reflection_database::get();
let Some(superclass)=db.classes.get(superclass)else{
return;
};
objects.extend(
dom.descendants_of(instance.referent()).filter_map(|instance|{
let class=db.classes.get(instance.class.as_str())?;
db.has_superclass(class,superclass).then(||instance.referent())
})
);
}
fn planar64_affine3_from_roblox(cf:&rbx_dom_weak::types::CFrame,size:&rbx_dom_weak::types::Vector3)->Result<Planar64Affine3,Planar64TryFromFloatError>{
Ok(Planar64Affine3::new(
fn planar64_affine3_from_roblox(cf:&rbx_dom_weak::types::CFrame,size:&rbx_dom_weak::types::Vector3)->Planar64Affine3{
Planar64Affine3::new(
Planar64Mat3::from_cols([
(vec3::try_from_f32_array([cf.orientation.x.x,cf.orientation.y.x,cf.orientation.z.x])?
*integer::try_from_f32(size.x/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
(vec3::try_from_f32_array([cf.orientation.x.y,cf.orientation.y.y,cf.orientation.z.y])?
*integer::try_from_f32(size.y/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
(vec3::try_from_f32_array([cf.orientation.x.z,cf.orientation.y.z,cf.orientation.z.z])?
*integer::try_from_f32(size.z/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
]),
vec3::try_from_f32_array([cf.position.x,cf.position.y,cf.position.z])?
))
vec3::try_from_f32_array([cf.orientation.x.x,cf.orientation.y.x,cf.orientation.z.x]).unwrap()
*integer::try_from_f32(size.x/2.0).unwrap(),
vec3::try_from_f32_array([cf.orientation.x.y,cf.orientation.y.y,cf.orientation.z.y]).unwrap()
*integer::try_from_f32(size.y/2.0).unwrap(),
vec3::try_from_f32_array([cf.orientation.x.z,cf.orientation.y.z,cf.orientation.z.z]).unwrap()
*integer::try_from_f32(size.z/2.0).unwrap(),
].map(|t|t.narrow_1().unwrap())),
vec3::try_from_f32_array([cf.position.x,cf.position.y,cf.position.z]).unwrap()
)
}
enum GetAttributesError{
ModeIdParseInt(ParseIntContext),
DuplicateMode(ModeId),
StageIdParseInt(ParseIntContext),
DuplicateStage(DuplicateStageError),
WormholeOutIdParseInt(ParseIntContext),
DuplicateWormholeOut(u32),
WormholeInIdParseInt(ParseIntContext),
JumpLimitParseInt(ParseIntContext),
}
fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:model::ModelId,modes_builder:&mut ModesBuilder,wormhole_in_model_to_id:&mut HashMap<model::ModelId,u32>,wormhole_id_to_out_model:&mut HashMap<u32,model::ModelId>)->Result<attr::CollisionAttributes,GetAttributesError>{
fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:model::ModelId,modes_builder:&mut ModesBuilder,wormhole_in_model_to_id:&mut HashMap<model::ModelId,u32>,wormhole_id_to_out_model:&mut HashMap<u32,model::ModelId>)->attr::CollisionAttributes{
let mut general=attr::GeneralAttributes::default();
let mut intersecting=attr::IntersectingAttributes::default();
let mut contacting=attr::ContactingAttributes::default();
@@ -86,14 +84,13 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
"MapStart"=>{
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::MAIN;
modes_builder.insert_mode(
mode_id,
ModeId::MAIN,
Mode::empty(
gameplay_style::StyleModifiers::roblox_bhop(),
model_id
)
).map_err(|_|GetAttributesError::DuplicateMode(mode_id))?;
).unwrap();
},
"MapFinish"=>{
force_can_collide=false;
@@ -127,36 +124,32 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
);
},
other=>{
let regman=lazy_regex!(r"^(BonusStart|WormholeOut)(\d+)$");
let regman=lazy_regex::regex!(r"^(BonusStart|WormholeOut)(\d+)$");
if let Some(captures)=regman.captures(other){
match &captures[1]{
"BonusStart"=>{
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::new(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::ModeIdParseInt)?);
modes_builder.insert_mode(
mode_id,
ModeId::new(captures[2].parse::<u32>().unwrap()),
Mode::empty(
gameplay_style::StyleModifiers::roblox_bhop(),
model_id
)
).map_err(|_|GetAttributesError::DuplicateMode(mode_id))?;
).unwrap();
},
"WormholeOut"=>{
//the PhysicsModelId has to exist for it to be teleported to!
force_intersecting=true;
//this object is not special in strafe client, but the roblox mapping needs to be converted to model id
let wormhole_id=ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::WormholeOutIdParseInt)?;
if wormhole_id_to_out_model.insert(wormhole_id,model_id).is_some(){
return Err(GetAttributesError::DuplicateWormholeOut(wormhole_id));
}
assert!(wormhole_id_to_out_model.insert(captures[2].parse::<u32>().unwrap(),model_id).is_none(),"Cannot have multiple WormholeOut with same id");
},
_=>(),
}
}else if let Some(captures)=lazy_regex!(r"^(Force)?(Spawn|SpawnAt|Trigger|Teleport|Platform)(\d+)$")
}else if let Some(captures)=lazy_regex::regex!(r"^(Force)?(Spawn|SpawnAt|Trigger|Teleport|Platform)(\d+)$")
.captures(other){
force_intersecting=true;
let stage_id=StageId::new(ParseIntContext::parse(&captures[3]).map_err(GetAttributesError::StageIdParseInt)?);
let stage_id=StageId::new(captures[3].parse::<u32>().unwrap());
let stage_element=StageElement::new(
//stage_id:
stage_id,
@@ -168,12 +161,11 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
//behaviour:
match &captures[2]{
"Spawn"=>{
let mode_id=ModeId::MAIN;
modes_builder.insert_stage(
mode_id,
ModeId::MAIN,
stage_id,
Stage::empty(model_id),
).map_err(|_|GetAttributesError::DuplicateStage(DuplicateStageError{mode_id,stage_id}))?;
).unwrap();
//TODO: let denormalize handle this
StageElementBehaviour::SpawnAt
},
@@ -183,7 +175,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
"Trigger"=>{force_can_collide=false;StageElementBehaviour::Trigger},
"Teleport"=>{force_can_collide=false;StageElementBehaviour::Teleport},
"Platform"=>StageElementBehaviour::Platform,
_=>unreachable!("regex1[2] messed up bad"),
_=>panic!("regex1[2] messed up bad"),
},
None
);
@@ -194,7 +186,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
stage_element,
),
);
}else if let Some(captures)=lazy_regex!(r"^(Jump|WormholeIn)(\d+)$")
}else if let Some(captures)=lazy_regex::regex!(r"^(Jump|WormholeIn)(\d+)$")
.captures(other){
match &captures[1]{
"Jump"=>modes_builder.push_mode_update(
@@ -206,33 +198,30 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
StageId::FIRST,
false,
StageElementBehaviour::Check,
Some(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::JumpLimitParseInt)?)
Some(captures[2].parse::<u8>().unwrap())
)
),
),
"WormholeIn"=>{
force_can_collide=false;
force_intersecting=true;
let wormhole_id=ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::WormholeInIdParseInt)?;
// It is impossible for two different objects to have the same model id
assert!(wormhole_in_model_to_id.insert(model_id,wormhole_id).is_none(),"Impossible");
assert!(wormhole_in_model_to_id.insert(model_id,captures[2].parse::<u32>().unwrap()).is_none(),"Impossible");
},
_=>unreachable!("regex2[1] messed up bad"),
_=>panic!("regex2[1] messed up bad"),
}
}else if let Some(captures)=lazy_regex!(r"^Bonus(Finish|Anticheat)(\d+)$")
}else if let Some(captures)=lazy_regex::regex!(r"^Bonus(Finish|Anticheat)(\d+)$")
.captures(other){
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::new(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::ModeIdParseInt)?);
modes_builder.push_mode_update(
mode_id,
ModeId::new(captures[2].parse::<u32>().unwrap()),
ModeUpdate::zone(
model_id,
//zone:
match &captures[1]{
"Finish"=>Zone::Finish,
"Anticheat"=>Zone::Anticheat,
_=>unreachable!("regex3[1] messed up bad"),
_=>panic!("regex3[1] messed up bad"),
},
),
);
@@ -254,7 +243,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
if allow_booster&&velocity!=vec3::ZERO{
general.booster=Some(attr::Booster::Velocity(velocity));
}
Ok(match force_can_collide{
match force_can_collide{
true=>{
match name{
"Bounce"=>contacting.contact_behaviour=Some(attr::ContactingBehaviour::Elastic(u32::MAX)),
@@ -272,7 +261,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
}else{
attr::CollisionAttributes::Decoration
},
})
}
}
#[derive(Clone,Copy)]
@@ -344,12 +333,12 @@ pub struct RobloxFaceTextureDescription{
pub color:glam::Vec4,
pub transform:RobloxTextureTransform,
}
impl PartialEq for RobloxFaceTextureDescription{
impl core::cmp::PartialEq for RobloxFaceTextureDescription{
fn eq(&self,other:&Self)->bool{
self.to_bits().eq(&other.to_bits())
}
}
impl Eq for RobloxFaceTextureDescription{}
impl core::cmp::Eq for RobloxFaceTextureDescription{}
impl core::hash::Hash for RobloxFaceTextureDescription{
fn hash<H:core::hash::Hasher>(&self,state:&mut H){
self.to_bits().hash(state);
@@ -418,25 +407,21 @@ fn get_content_url(content:&rbx_dom_weak::types::Content)->Option<&str>{
}
fn get_texture_description<'a>(
temp_objects:&mut Vec<rbx_dom_weak::types::Ref>,
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
recoverable_errors:&mut RecoverableErrors,
db:&rbx_reflection::ReflectionDatabase,
dom:&'a rbx_dom_weak::WeakDom,
object:&rbx_dom_weak::Instance,
size:&rbx_dom_weak::types::Vector3,
)->RobloxPartDescription{
//use the biggest one and cut it down later...
let mut part_texture_description=RobloxPartDescription::default();
let decal=&db.classes["Decal"];
let decals=object.children().iter().filter_map(|&referent|{
let instance=dom.get_by_ref(referent)?;
db.classes.get(instance.class.as_str()).is_some_and(|class|
db.has_superclass(class,decal)
).then_some(instance)
});
for decal in decals{
// decals should always have these properties,
// but it is not guaranteed by the rbx_dom_weak data structure.
temp_objects.clear();
recursive_collect_superclass(temp_objects,&dom,object,"Decal");
for &mut decal_ref in temp_objects{
let Some(decal)=dom.get_by_ref(decal_ref) else{
println!("Decal get_by_ref failed");
continue;
};
let (
Some(rbx_dom_weak::types::Variant::Content(content)),
Some(rbx_dom_weak::types::Variant::Enum(normalid)),
@@ -448,16 +433,16 @@ fn get_texture_description<'a>(
decal.properties.get(&static_ustr("Color3")),
decal.properties.get(&static_ustr("Transparency")),
)else{
recoverable_errors.decal_property.push(InstancePath::new(dom,decal));
println!("Decal is missing a required property");
continue;
};
let texture_id=get_content_url(content);
let texture_id=match content.value(){
rbx_dom_weak::types::ContentType::Uri(uri)=>Some(uri.as_str()),
_=>None,
};
let render_id=render_config_deferred_loader.acquire_render_config_id(texture_id);
let Ok(cube_face)=normalid.to_u32().try_into()else{
recoverable_errors.normal_id.push(NormalIdError{
path:InstancePath::new(dom,decal),
normal_id:normalid.to_u32(),
});
println!("NormalId is invalid");
continue;
};
let (roblox_texture_color,roblox_texture_transform)=if decal.class=="Texture"{
@@ -494,7 +479,6 @@ fn get_texture_description<'a>(
}
)
}else{
recoverable_errors.texture_property.push(InstancePath::new(dom,decal));
(glam::Vec4::ONE,RobloxTextureTransform::identity())
}
}else{
@@ -548,8 +532,6 @@ pub fn convert<'a>(
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
mesh_deferred_loader:&mut MeshDeferredLoader<MeshIndex<'a>>,
)->PartialMap1<'a>{
let mut recoverable_errors=RecoverableErrors::default();
let mut deferred_models_deferred_attributes=Vec::new();
let mut deferred_unions_deferred_attributes=Vec::new();
let mut primitive_models_deferred_attributes=Vec::new();
@@ -559,83 +541,63 @@ pub fn convert<'a>(
//just going to leave it like this for now instead of reworking the data structures for this whole thing
let textureless_render_group=render_config_deferred_loader.acquire_render_config_id(None);
let db=rbx_reflection_database::get();
let basepart=&db.classes["BasePart"];
let baseparts=dom.descendants().filter(|&instance|
db.classes.get(instance.class.as_str()).is_some_and(|class|
db.has_superclass(class,basepart)
)
);
for object in baseparts{
let (
Some(rbx_dom_weak::types::Variant::CFrame(cf)),
Some(rbx_dom_weak::types::Variant::Vector3(size)),
Some(rbx_dom_weak::types::Variant::Vector3(velocity)),
Some(rbx_dom_weak::types::Variant::Float32(transparency)),
Some(rbx_dom_weak::types::Variant::Color3uint8(color3)),
Some(&rbx_dom_weak::types::Variant::Bool(can_collide)),
) = (
object.properties.get(&static_ustr("CFrame")),
object.properties.get(&static_ustr("Size")),
object.properties.get(&static_ustr("Velocity")),
object.properties.get(&static_ustr("Transparency")),
object.properties.get(&static_ustr("Color")),
object.properties.get(&static_ustr("CanCollide")),
)else{
recoverable_errors.basepart_property.push(InstancePath::new(dom,object));
continue;
};
let model_transform=match planar64_affine3_from_roblox(cf,size){
Ok(model_transform)=>{
let mut object_refs=Vec::new();
let mut temp_objects=Vec::new();
recursive_collect_superclass(&mut object_refs, &dom, dom.root(),"BasePart");
for object_ref in object_refs {
if let Some(object)=dom.get_by_ref(object_ref){
if let (
Some(rbx_dom_weak::types::Variant::CFrame(cf)),
Some(rbx_dom_weak::types::Variant::Vector3(size)),
Some(rbx_dom_weak::types::Variant::Vector3(velocity)),
Some(rbx_dom_weak::types::Variant::Float32(transparency)),
Some(rbx_dom_weak::types::Variant::Color3uint8(color3)),
Some(rbx_dom_weak::types::Variant::Bool(can_collide)),
) = (
object.properties.get(&static_ustr("CFrame")),
object.properties.get(&static_ustr("Size")),
object.properties.get(&static_ustr("Velocity")),
object.properties.get(&static_ustr("Transparency")),
object.properties.get(&static_ustr("Color")),
object.properties.get(&static_ustr("CanCollide")),
)
{
let model_transform=planar64_affine3_from_roblox(cf,size);
if model_transform.matrix3.det().is_zero(){
recoverable_errors.basepart_cframe.push(CFrameError{
path:InstancePath::new(dom,object),
error:CFrameErrorType::ZeroDeterminant,
});
let mut parent_ref=object.parent();
let mut full_path=object.name.clone();
while let Some(parent)=dom.get_by_ref(parent_ref){
full_path=format!("{}.{}",parent.name,full_path);
parent_ref=parent.parent();
}
println!("Zero determinant CFrame at location {}",full_path);
println!("matrix3:{}",model_transform.matrix3);
continue;
}
model_transform
},
Err(e)=>{
recoverable_errors.basepart_cframe.push(CFrameError{
path:InstancePath::new(dom,object),
error:CFrameErrorType::Convert(e),
});
continue;
}
};
//TODO: also detect "CylinderMesh" etc here
let shape=match object.class.as_str(){
"Part"|"Seat"|"SpawnLocation"=>{
let Some(rbx_dom_weak::types::Variant::Enum(shape))=object.properties.get(&static_ustr("Shape"))else{
recoverable_errors.part_property.push(InstancePath::new(dom,object));
continue;
//TODO: also detect "CylinderMesh" etc here
let shape=match object.class.as_str(){
"Part"=>if let Some(rbx_dom_weak::types::Variant::Enum(shape))=object.properties.get(&static_ustr("Shape")){
Shape::Primitive(shape.to_u32().try_into().expect("Funky roblox PartType"))
}else{
panic!("Part has no Shape!");
},
"TrussPart"=>Shape::Primitive(Primitives::Cube),
"WedgePart"=>Shape::Primitive(Primitives::Wedge),
"CornerWedgePart"=>Shape::Primitive(Primitives::CornerWedge),
"MeshPart"=>Shape::MeshPart,
"UnionOperation"=>Shape::PhysicsData,
_=>{
println!("Unsupported BasePart ClassName={}; defaulting to cube",object.class);
Shape::Primitive(Primitives::Cube)
}
};
let Ok(shape)=shape.to_u32().try_into()else{
recoverable_errors.part_shape.push(ShapeError{
path:InstancePath::new(dom,object),
shape:shape.to_u32(),
});
continue;
};
Shape::Primitive(shape)
},
"TrussPart"|"VehicleSeat"=>Shape::Primitive(Primitives::Cube),
"WedgePart"=>Shape::Primitive(Primitives::Wedge),
"CornerWedgePart"=>Shape::Primitive(Primitives::CornerWedge),
"MeshPart"=>Shape::MeshPart,
"UnionOperation"=>Shape::PhysicsData,
"Terrain"=>continue,
_=>{
recoverable_errors.unsupported_class.insert(object.class.as_str().to_owned());
Shape::Primitive(Primitives::Cube)
}
};
let (availability,mesh_id)=match shape{
Shape::Primitive(primitive_shape)=>{
let part_texture_description=get_texture_description(render_config_deferred_loader,&mut recoverable_errors,db,dom,object,size);
let (availability,mesh_id)=match shape{
Shape::Primitive(primitive_shape)=>{
//TODO: TAB TAB
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
//obscure rust syntax "slice pattern"
let RobloxPartDescription([
f0,//Cube::Right
@@ -684,83 +646,66 @@ pub fn convert<'a>(
mesh_id
};
(MeshAvailability::Immediate,mesh_id)
},
Shape::MeshPart=>{
let (
Some(rbx_dom_weak::types::Variant::Content(mesh_content)),
Some(rbx_dom_weak::types::Variant::Content(texture_content)),
)=(
// mesh must exist
object.properties.get(&static_ustr("MeshContent")),
// texture is allowed to be none
object.properties.get(&static_ustr("TextureContent")),
)else{
recoverable_errors.meshpart_property.push(InstancePath::new(dom,object));
continue;
},
Shape::MeshPart=>if let (
Some(rbx_dom_weak::types::Variant::Content(mesh_content)),
Some(rbx_dom_weak::types::Variant::Content(texture_content)),
)=(
// mesh must exist
object.properties.get(&static_ustr("MeshContent")),
// texture is allowed to be none
object.properties.get(&static_ustr("TextureContent")),
){
let mesh_asset_id=get_content_url(mesh_content).unwrap_or_default();
let texture_asset_id=get_content_url(texture_content);
(
MeshAvailability::DeferredMesh(render_config_deferred_loader.acquire_render_config_id(texture_asset_id)),
mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id)),
)
}else{
panic!("Mesh has no Mesh or Texture");
},
Shape::PhysicsData=>{
let mut content="";
let mut mesh_data:&[u8]=&[];
let mut physics_data:&[u8]=&[];
if let Some(rbx_dom_weak::types::Variant::ContentId(asset_id))=object.properties.get(&static_ustr("AssetId")){
content=asset_id.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("MeshData")){
mesh_data=data.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("PhysicsData")){
physics_data=data.as_ref();
}
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
},
};
let mesh_asset_id=match get_content_url(mesh_content){
Some(mesh_asset_id)=>mesh_asset_id,
None=>{
recoverable_errors.meshpart_content.push(InstancePath::new(dom,object));
// Return an empty string which will fail to parse as an asset id
""
}
let model_deferred_attributes=ModelDeferredAttributes{
mesh:mesh_id,
transform:model_transform,
color:glam::vec4(color3.r as f32/255f32, color3.g as f32/255f32, color3.b as f32/255f32, 1.0-*transparency),
deferred_attributes:GetAttributesArgs{
name:object.name.as_str(),
can_collide:*can_collide,
velocity:vec3::try_from_f32_array([velocity.x,velocity.y,velocity.z]).unwrap(),
},
};
let texture_asset_id=get_content_url(texture_content);
(
MeshAvailability::DeferredMesh(render_config_deferred_loader.acquire_render_config_id(texture_asset_id)),
mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id)),
)
},
Shape::PhysicsData=>{
let mut content="";
let mut mesh_data:&[u8]=&[];
let mut physics_data:&[u8]=&[];
if let Some(rbx_dom_weak::types::Variant::ContentId(asset_id))=object.properties.get(&static_ustr("AssetId")){
content=asset_id.as_ref();
match availability{
MeshAvailability::Immediate=>primitive_models_deferred_attributes.push(model_deferred_attributes),
MeshAvailability::DeferredMesh(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
render,
model:model_deferred_attributes
}),
MeshAvailability::DeferredUnion(part_texture_description)=>deferred_unions_deferred_attributes.push(DeferredUnionDeferredAttributes{
render:part_texture_description,
model:model_deferred_attributes,
}),
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("MeshData")){
mesh_data=data.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("PhysicsData")){
physics_data=data.as_ref();
}
let part_texture_description=get_texture_description(render_config_deferred_loader,&mut recoverable_errors,db,dom,object,size);
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
},
};
let velocity=match vec3::try_from_f32_array([velocity.x,velocity.y,velocity.z]){
Ok(velocity)=>velocity,
Err(e)=>{
recoverable_errors.basepart_velocity.push(Planar64ConvertError{
path:InstancePath::new(dom,object),
error:e,
});
continue;
}
};
let model_deferred_attributes=ModelDeferredAttributes{
mesh:mesh_id,
transform:model_transform,
color:glam::vec4(color3.r as f32/255f32,color3.g as f32/255f32,color3.b as f32/255f32,1.0-*transparency),
deferred_attributes:GetAttributesArgs{
name:object.name.as_str(),
can_collide,
velocity,
},
};
match availability{
MeshAvailability::Immediate=>primitive_models_deferred_attributes.push(model_deferred_attributes),
MeshAvailability::DeferredMesh(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
render,
model:model_deferred_attributes
}),
MeshAvailability::DeferredUnion(part_texture_description)=>deferred_unions_deferred_attributes.push(DeferredUnionDeferredAttributes{
render:part_texture_description,
model:model_deferred_attributes,
}),
}
}
PartialMap1{
@@ -768,17 +713,16 @@ pub fn convert<'a>(
primitive_models_deferred_attributes,
deferred_models_deferred_attributes,
deferred_unions_deferred_attributes,
recoverable_errors,
}
}
struct MeshIdWithSize{
mesh:model::MeshId,
size:Planar64Vec3,
}
fn acquire_mesh_id_from_render_config_id(
fn acquire_mesh_id_from_render_config_id<'a>(
primitive_meshes:&mut Vec<model::Mesh>,
mesh_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RenderConfigId,model::MeshId>>,
loaded_meshes:&HashMap<model::MeshId,MeshWithSize>,
loaded_meshes:&'a HashMap<model::MeshId,MeshWithSize>,
old_mesh_id:model::MeshId,
render:RenderConfigId,
)->Option<MeshIdWithSize>{
@@ -798,10 +742,10 @@ fn acquire_mesh_id_from_render_config_id(
size,
})
}
fn acquire_union_id_from_render_config_id(
fn acquire_union_id_from_render_config_id<'a>(
primitive_meshes:&mut Vec<model::Mesh>,
union_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RobloxPartDescription,model::MeshId>>,
loaded_meshes:&HashMap<model::MeshId,MeshWithSize>,
loaded_meshes:&'a HashMap<model::MeshId,MeshWithSize>,
old_union_id:model::MeshId,
part_texture_description:RobloxPartDescription,
)->Option<MeshIdWithSize>{
@@ -828,7 +772,6 @@ pub struct PartialMap1<'a>{
primitive_models_deferred_attributes:Vec<ModelDeferredAttributes<'a>>,
deferred_models_deferred_attributes:Vec<DeferredModelDeferredAttributes<'a>>,
deferred_unions_deferred_attributes:Vec<DeferredUnionDeferredAttributes<'a>>,
recoverable_errors:RecoverableErrors,
}
impl PartialMap1<'_>{
pub fn add_meshpart_meshes_and_calculate_attributes(
@@ -852,7 +795,6 @@ impl PartialMap1<'_>{
// I just want to chain iterators together man
let aint_no_way=core::cell::UnsafeCell::new(&mut self.primitive_meshes);
let mut model_counter=0;
let mut mesh_id_from_render_config_id=HashMap::new();
let mut union_id_from_render_config_id=HashMap::new();
//now that the meshes are loaded, these models can be generated
@@ -905,78 +847,61 @@ impl PartialMap1<'_>{
})
}))
.chain(self.primitive_models_deferred_attributes.into_iter())
.filter_map(|model_deferred_attributes|{
let model_id=model::ModelId::new(model_counter);
let attributes=match get_attributes(
&model_deferred_attributes.deferred_attributes.name,
model_deferred_attributes.deferred_attributes.can_collide,
model_deferred_attributes.deferred_attributes.velocity,
model_id,
&mut modes_builder,
&mut wormhole_in_model_to_id,
&mut wormhole_id_to_out_model,
){
Ok(attributes)=>attributes,
Err(e)=>{
match e{
GetAttributesError::ModeIdParseInt(e)=>self.recoverable_errors.mode_id_parse_int.push(e),
GetAttributesError::DuplicateMode(mode_id)=>{self.recoverable_errors.duplicate_mode.insert(mode_id);},
GetAttributesError::StageIdParseInt(e)=>self.recoverable_errors.stage_id_parse_int.push(e),
GetAttributesError::DuplicateStage(duplicate_stage)=>{self.recoverable_errors.duplicate_stage.insert(duplicate_stage);},
GetAttributesError::WormholeOutIdParseInt(e)=>self.recoverable_errors.wormhole_out_id_parse_int.push(e),
GetAttributesError::DuplicateWormholeOut(wormhole_id)=>{self.recoverable_errors.duplicate_wormhole_out.insert(wormhole_id);},
GetAttributesError::WormholeInIdParseInt(e)=>self.recoverable_errors.wormhole_in_id_parse_int.push(e),
GetAttributesError::JumpLimitParseInt(e)=>self.recoverable_errors.jump_limit_parse_int.push(e),
}
return None;
}
};
model_counter+=1;
Some(ModelOwnedAttributes{
.enumerate().map(|(model_id,model_deferred_attributes)|{
let model_id=model::ModelId::new(model_id as u32);
ModelOwnedAttributes{
mesh:model_deferred_attributes.mesh,
attributes,
attributes:get_attributes(
&model_deferred_attributes.deferred_attributes.name,
model_deferred_attributes.deferred_attributes.can_collide,
model_deferred_attributes.deferred_attributes.velocity,
model_id,
&mut modes_builder,
&mut wormhole_in_model_to_id,
&mut wormhole_id_to_out_model,
),
color:model_deferred_attributes.color,
transform:model_deferred_attributes.transform,
})
}
}).collect();
let models=models_owned_attributes.into_iter().enumerate().map(|(model_id,mut model_owned_attributes)|{
let model_id=model::ModelId::new(model_id as u32);
//update attributes with wormhole id
//TODO: errors/prints
if let Some(wormhole_id)=wormhole_in_model_to_id.get(&model_id){
if let Some(&wormhole_out_model_id)=wormhole_id_to_out_model.get(wormhole_id){
match &mut model_owned_attributes.attributes{
attr::CollisionAttributes::Contact(attr::ContactAttributes{contacting:_,general})
|attr::CollisionAttributes::Intersect(attr::IntersectAttributes{intersecting:_,general})
=>general.wormhole=Some(attr::Wormhole{destination_model:wormhole_out_model_id}),
attr::CollisionAttributes::Decoration=>println!("Not a wormhole"),
}
//TODO: TAB
let model_id=model::ModelId::new(model_id as u32);
//update attributes with wormhole id
//TODO: errors/prints
if let Some(wormhole_id)=wormhole_in_model_to_id.get(&model_id){
if let Some(&wormhole_out_model_id)=wormhole_id_to_out_model.get(wormhole_id){
match &mut model_owned_attributes.attributes{
attr::CollisionAttributes::Contact(attr::ContactAttributes{contacting:_,general})
|attr::CollisionAttributes::Intersect(attr::IntersectAttributes{intersecting:_,general})
=>general.wormhole=Some(attr::Wormhole{destination_model:wormhole_out_model_id}),
attr::CollisionAttributes::Decoration=>println!("Not a wormhole"),
}
}
//index the attributes
let attributes_id=if let Some(&attributes_id)=attributes_id_from_attributes.get(&model_owned_attributes.attributes){
attributes_id
}else{
let attributes_id=attr::CollisionAttributesId::new(unique_attributes.len() as u32);
attributes_id_from_attributes.insert(model_owned_attributes.attributes.clone(),attributes_id);
unique_attributes.push(model_owned_attributes.attributes);
attributes_id
};
model::Model{
mesh:model_owned_attributes.mesh,
transform:model_owned_attributes.transform,
color:model_owned_attributes.color,
attributes:attributes_id,
}
}).collect();
PartialMap2{
meshes:self.primitive_meshes,
models,
modes:modes_builder.build_normalized(),
attributes:unique_attributes,
recoverable_errors:self.recoverable_errors,
}
//index the attributes
let attributes_id=if let Some(&attributes_id)=attributes_id_from_attributes.get(&model_owned_attributes.attributes){
attributes_id
}else{
let attributes_id=attr::CollisionAttributesId::new(unique_attributes.len() as u32);
attributes_id_from_attributes.insert(model_owned_attributes.attributes.clone(),attributes_id);
unique_attributes.push(model_owned_attributes.attributes);
attributes_id
};
model::Model{
mesh:model_owned_attributes.mesh,
transform:model_owned_attributes.transform,
color:model_owned_attributes.color,
attributes:attributes_id,
}
}).collect();
PartialMap2{
meshes:self.primitive_meshes,
models,
modes:modes_builder.build_normalized(),
attributes:unique_attributes,
}
}
}
@@ -985,13 +910,12 @@ pub struct PartialMap2{
models:Vec<model::Model>,
modes:NormalizedModes,
attributes:Vec<strafesnet_common::gameplay_attributes::CollisionAttributes>,
recoverable_errors:RecoverableErrors,
}
impl PartialMap2{
pub fn add_render_configs_and_textures(
self,
render_configs:RenderConfigs,
)->(map::CompleteMap,RecoverableErrors){
)->map::CompleteMap{
let (textures,render_configs)=render_configs.consume();
let (textures,texture_id_map):(Vec<Vec<u8>>,HashMap<model::TextureId,model::TextureId>)
=textures.into_iter().enumerate().map(|(new_texture_id,(old_texture_id,Texture::ImageDDS(texture)))|{
@@ -1010,17 +934,14 @@ impl PartialMap2{
);
render_config
}).collect();
(
map::CompleteMap{
modes:self.modes,
attributes:self.attributes,
meshes:self.meshes,
models:self.models,
//the roblox legacy texture thing always works
textures,
render_configs,
},
self.recoverable_errors,
)
map::CompleteMap{
modes:self.modes,
attributes:self.attributes,
meshes:self.meshes,
models:self.models,
//the roblox legacy texture thing always works
textures,
render_configs,
}
}
}
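
The attribute indexing in add_meshpart_meshes_and_calculate_attributes above is a value-interning pass: identical CollisionAttributes values share one CollisionAttributesId through the attributes_id_from_attributes map, and only unseen values are pushed into unique_attributes. A minimal sketch of the same pattern as a generic helper (illustrative names, not the crate's API):

use std::collections::HashMap;
use std::hash::Hash;

// Sketch: map equal values to a single sequential id, appending new values to `storage`.
fn intern<T:Clone+Eq+Hash>(table:&mut HashMap<T,u32>,storage:&mut Vec<T>,value:T)->u32{
    *table.entry(value.clone()).or_insert_with(||{
        let id=storage.len() as u32;
        storage.push(value);
        id
    })
}

Keeping the id map separate from the storage vector mirrors the code above, where unique_attributes later becomes the map's attribute list while the lookup table is discarded.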

View File

@@ -1,12 +1,13 @@
use crate::loader::MeshWithSize;
use crate::rbx::RobloxPartDescription;
use crate::primitives::{CUBE_DEFAULT_VERTICES,CUBE_DEFAULT_POLYS,FaceDescription};
use crate::primitives::{CUBE_DEFAULT_VERTICES,CUBE_DEFAULT_POLYS};
use rbx_mesh::mesh_data::{VertexId as MeshDataVertexId,NormalId as MeshDataNormalId,NormalId2 as MeshDataNormalId2,NormalId5 as MeshDataNormalId5};
use rbx_mesh::mesh_data::{VertexId as MeshDataVertexId,NormalId2 as MeshDataNormalId2};
use rbx_mesh::physics_data::VertexId as PhysicsDataVertexId;
use strafesnet_common::model::{self,IndexedVertex,MeshBuilder,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId,VertexId};
use strafesnet_common::model::{self,IndexedVertex,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId};
use strafesnet_common::integer::vec3;
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Block,
@@ -24,7 +25,7 @@ impl std::fmt::Display for Error{
// wacky state machine to make sure all vertices in a face agree upon what NormalId to use.
// Roblox duplicates this information per vertex when it should only exist per-face.
enum MeshDataNormalStatus{
Agree(MeshDataNormalId),
Agree(MeshDataNormalId2),
Conflicting,
}
struct MeshDataNormalChecker{
@@ -34,7 +35,7 @@ impl MeshDataNormalChecker{
fn new()->Self{
Self{status:None}
}
fn check(&mut self,normal:MeshDataNormalId){
fn check(&mut self,normal:MeshDataNormalId2){
self.status=match self.status.take(){
None=>Some(MeshDataNormalStatus::Agree(normal)),
Some(MeshDataNormalStatus::Agree(old_normal))=>{
@@ -47,7 +48,7 @@ impl MeshDataNormalChecker{
Some(MeshDataNormalStatus::Conflicting)=>Some(MeshDataNormalStatus::Conflicting),
};
}
fn into_agreed_normal(self)->Option<MeshDataNormalId>{
fn into_agreed_normal(self)->Option<MeshDataNormalId2>{
self.status.and_then(|status|match status{
MeshDataNormalStatus::Agree(normal)=>Some(normal),
MeshDataNormalStatus::Conflicting=>None,
@@ -55,116 +56,6 @@ impl MeshDataNormalChecker{
}
}
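
MeshDataNormalChecker is a two-state fold over the per-vertex normal ids of one face: the first id seeds Agree, any later mismatch collapses to Conflicting, and only a face whose vertices all agree is routed into a per-normal polygon group. A hedged sketch of the same fold as a free function (generic over any PartialEq id type, not the crate's exact signature):

// Returns Some(id) only when every vertex of the face reports the same normal id.
fn agreed_normal<T:Copy+PartialEq>(ids:impl IntoIterator<Item=T>)->Option<T>{
    let mut agreed=None;
    for id in ids{
        match agreed{
            None=>agreed=Some(id),
            Some(prev) if prev==id=>{},
            Some(_)=>return None, // conflicting per-vertex data: no single face normal
        }
    }
    agreed
}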
fn build_mesh2(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::Mesh2,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for vertex in &mesh.vertices{
let p=vertex.pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for [MeshDataVertexId(vertex_id0),MeshDataVertexId(vertex_id1),MeshDataVertexId(vertex_id2)] in mesh.faces{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
let MeshDataNormalId2(normal_id)=vertex.normal_id;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(vertex.pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array())?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
fn build_mesh5(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::CSGMDL5,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for &pos in &mesh.positions{
let p=pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for face in mesh.faces.indices.chunks_exact(3){
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|&vertex_id|{
let vertex_index=vertex_id as usize;
let &pos=mesh.positions.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &MeshDataNormalId5(normal_id)=mesh.normal_ids.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &norm=mesh.normals.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &tex=mesh.tex.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &color=mesh.colors.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array()).map_err(Error::Planar64Vec3)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(norm).map_err(Error::Planar64Vec3)?);
let tex_coord=glam::Vec2::from_array(tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>()?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
const NORMAL_FACES:usize=6;
impl std::error::Error for Error{}
pub fn convert(
roblox_physics_data:&[u8],
@@ -172,10 +63,11 @@ pub fn convert(
size:glam::Vec3,
RobloxPartDescription(part_texture_description):RobloxPartDescription,
)->Result<MeshWithSize,Error>{
const NORMAL_FACES:usize=6;
let mut polygon_groups_normal_id:[_;NORMAL_FACES]=[vec![],vec![],vec![],vec![],vec![],vec![]];
// build graphics and physics meshes
let mut mb=MeshBuilder::new();
let mut mb=strafesnet_common::model::MeshBuilder::new();
// graphics
let graphics_groups=if !roblox_mesh_data.is_empty(){
// create per-face texture coordinate affine transforms
@@ -187,12 +79,56 @@ pub fn convert(
let mesh_data=rbx_mesh::read_mesh_data_versioned(
std::io::Cursor::new(roblox_mesh_data)
).map_err(Error::RobloxMeshData)?;
match mesh_data{
let graphics_mesh=match mesh_data{
rbx_mesh::mesh_data::MeshData::CSGK(_)=>return Err(Error::Block),
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V2(mesh_data2))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data2.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V4(mesh_data4))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V5(mesh_data4))=>build_mesh5(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL2(mesh_data2))=>mesh_data2.mesh,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL4(mesh_data4))=>mesh_data4.mesh,
};
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for vertex in &graphics_mesh.vertices{
let p=vertex.pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for [MeshDataVertexId(vertex_id0),MeshDataVertexId(vertex_id1),MeshDataVertexId(vertex_id2)] in graphics_mesh.faces{
let face=[
graphics_mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
graphics_mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
graphics_mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
normal_agreement_checker.check(vertex.normal_id);
let pos=glam::Vec3::from_array(vertex.pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array())?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[vertex.normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
(0..NORMAL_FACES).map(|polygon_group_id|{
model::IndexedGraphicsGroup{
render:cube_face_description[polygon_group_id].as_ref().map_or(RenderConfigId::new(0),|face_description|face_description.render),
@@ -217,13 +153,10 @@ pub fn convert(
// have not seen this format in practice
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Block)
=>return Err(Error::Block),
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V3(meshes))
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V5(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V6(meshes))
=>vec![meshes.mesh],
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V7(meshes))
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Meshes(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::PhysicsInfoMesh(pim))
=>vec![pim.mesh],
};
let physics_convex_meshes_it=physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy

View File

@@ -9,6 +9,3 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
url = "2.5.4"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "roblox_emulator"
version = "0.5.1"
version = "0.5.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -13,12 +13,9 @@ run-service=[]
[dependencies]
glam = "0.30.0"
mlua = { version = "0.11.3", features = ["luau"] }
phf = { version = "0.13.1", features = ["macros"] }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
mlua = { version = "0.10.1", features = ["luau"] }
phf = { version = "0.11.2", features = ["macros"] }
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
rbx_reflection = "5.0.0"
rbx_reflection_database = "1.0.0"
rbx_types = "2.0.0"
[lints]
workspace = true

View File

@@ -79,15 +79,7 @@ impl Context{
//insert services
let game=dom.root_ref();
let terrain_bldr=InstanceBuilder::new("Terrain")
.with_properties([
("CFrame",rbx_dom_weak::types::Variant::CFrame(rbx_dom_weak::types::CFrame::new(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0),rbx_dom_weak::types::Matrix3::identity()))),
("Size",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(1.0,1.0,1.0))),
("Velocity",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0))),
("Transparency",rbx_dom_weak::types::Variant::Float32(0.0)),
("Color",rbx_dom_weak::types::Variant::Color3uint8(rbx_dom_weak::types::Color3uint8::new(255,255,255))),
("CanCollide",rbx_dom_weak::types::Variant::Bool(true)),
]);
let terrain_bldr=InstanceBuilder::new("Terrain");
let workspace=dom.insert(game,
InstanceBuilder::new("Workspace")
//Set Workspace.Terrain property equal to Terrain

View File

@@ -8,7 +8,7 @@ impl<'a> EnumItem<'a>{
Self{name:Some(name.as_ref()),value}
}
}
impl From<rbx_types::Enum> for EnumItem<'_>{
impl<'a> From<rbx_types::Enum> for EnumItem<'a>{
fn from(e:rbx_types::Enum)->Self{
EnumItem{
name:None,
@@ -65,7 +65,7 @@ impl<'a> EnumItems<'a>{
}
pub enum CoerceEnum<'a>{
Integer(i64),
Integer(i32),
String(mlua::String),
Enum(EnumItem<'a>),
}

View File

@@ -5,12 +5,12 @@ use rbx_types::Ref;
use rbx_dom_weak::{Ustr,InstanceBuilder,WeakDom};
use crate::util::static_ustr;
use crate::runner::vector3::Vector3;
use crate::runner::number::Number;
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
//class functions store
lua.set_app_data(ClassMethodsStore::default());
lua.set_app_data(InstanceValueStore::default());
let table=lua.create_table()?;
@@ -43,7 +43,7 @@ pub fn class_is_a(class:&str,superclass:&str)->bool{
};
db.has_superclass(class,superclass)
}
fn get_full_name(dom:&WeakDom,instance:&rbx_dom_weak::Instance)->String{
fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->String{
let mut full_name=instance.name.clone();
let mut pref=instance.parent();
while let Some(parent)=dom.get_by_ref(pref){
@@ -65,28 +65,28 @@ pub fn get_name_source(lua:&mlua::Lua,script:Instance)->Result<(String,String),m
})
}
pub fn find_first_child<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.name==name)
}
pub fn find_first_descendant<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
dom.descendants_of(instance.referent()).find(|&inst|inst.name==name)
}
pub fn find_first_child_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.class==class)
}
pub fn find_first_descendant_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
dom.descendants_of(instance.referent()).find(|&inst|inst.class==class)
}
pub fn find_first_child_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child_which_is_a<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
let db=rbx_reflection_database::get();
let superclass_descriptor=db.classes.get(superclass)?;
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|{
db.classes.get(inst.class.as_str()).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
})
}
pub fn find_first_descendant_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant_which_is_a<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
let db=rbx_reflection_database::get();
let superclass_descriptor=db.classes.get(superclass)?;
dom.descendants_of(instance.referent()).find(|inst|{
@@ -325,16 +325,13 @@ impl mlua::UserData for Instance{
}
//find or create an associated userdata object
if let Some(value)=instance_value_store_mut(lua,|ivs|{
//TODO: walk class tree somehow
match ivs.get_or_create_instance_values(&instance){
Some(mut instance_values)=>instance_values.get_or_create_value(lua,index_str),
None=>Ok(None)
}
})?{
let instance=this.get_mut(dom)?;
if let Some(value)=get_or_create_userdata(instance,lua,index_str)?{
return value.into_lua(lua);
}
// drop mutable borrow
//find a child with a matching name
let instance=this.get(dom)?;
find_first_child(dom,instance,index_str)
.map(|instance|Instance::new_unchecked(instance.referent()))
.into_lua(lua)
@@ -422,7 +419,7 @@ impl mlua::UserData for Instance{
rbx_types::Variant::CFrame(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::ContentId)=>{
let typed_value=value.as_string().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_str()?.to_owned();
let typed_value=value.as_str().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_owned();
rbx_types::Variant::ContentId(typed_value.into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Ref)=>{
@@ -487,8 +484,8 @@ static CLASS_FUNCTION_DATABASE:CFD=phf::phf_map!{
"GetService"=>GET_SERVICE,
},
"Terrain"=>phf::phf_map!{
"FillBall"=>cf!(|_lua,_,_:(crate::runner::vector3::Vector3,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,crate::runner::vector3::Vector3,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBall"=>cf!(|_lua,_,_:(Vector3,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Vector3,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillCylinder"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Number,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"SetMaterialColor"=>cf!(|_lua,_,_:(crate::runner::r#enum::CoerceEnum,crate::runner::color3::Color3)|mlua::Result::Ok(())),
},
@@ -522,7 +519,7 @@ struct ClassMethodsStore{
}
impl ClassMethodsStore{
/// return self.classes[class] or create the ClassMethods and then return it
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods<'_>>{
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods>{
// Use get_entry to get the &'static str keys of the database
// and use it as a key for the classes hashmap
CLASS_FUNCTION_DATABASE.get_entry(class)
@@ -609,8 +606,6 @@ fn find_virtual_property(
}
// lazy-loaded per-instance userdata values
// This whole thing is a bad idea and a garbage collection nightmare.
// TODO: recreate rbx_dom_weak with my own instance type that owns this data.
type CreateUserData=fn(&mlua::Lua)->mlua::Result<mlua::AnyUserData>;
type LUD=phf::Map<&'static str,// Class name
phf::Map<&'static str,// Value name
@@ -643,47 +638,22 @@ static LAZY_USER_DATA:LUD=phf::phf_map!{
"MouseClick"=>create_script_signal,
},
};
#[derive(Default)]
pub struct InstanceValueStore{
values:HashMap<Ref,
HashMap<&'static str,
mlua::AnyUserData
>
>,
}
pub struct InstanceValues<'a>{
named_values:&'static phf::Map<&'static str,CreateUserData>,
values:&'a mut HashMap<&'static str,mlua::AnyUserData>,
}
impl InstanceValueStore{
pub fn get_or_create_instance_values(&mut self,instance:&rbx_dom_weak::Instance)->Option<InstanceValues<'_>>{
LAZY_USER_DATA.get(instance.class.as_str())
.map(|named_values|
InstanceValues{
named_values,
values:self.values.entry(instance.referent())
.or_insert_with(||HashMap::new()),
}
)
fn get_or_create_userdata(instance:&mut rbx_dom_weak::Instance,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
use std::collections::hash_map::Entry;
let db=rbx_reflection_database::get();
let Some(class)=db.classes.get(instance.class.as_str())else{
return Ok(None)
};
if let Some((&static_str,create_userdata))=db.superclasses_iter(class).find_map(|superclass|
// find pair (class,index)
LAZY_USER_DATA.get(&superclass.name)
.and_then(|map|map.get_entry(index))
){
let index_ustr=static_ustr(static_str);
return Ok(Some(match instance.userdata.entry(index_ustr){
Entry::Occupied(entry)=>entry.get().clone(),
Entry::Vacant(entry)=>entry.insert(create_userdata(lua)?).clone(),
}));
}
}
impl InstanceValues<'_>{
pub fn get_or_create_value(&mut self,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
Ok(match self.named_values.get_entry(index){
Some((&static_index_str,&function_pointer))=>Some(
match self.values.entry(static_index_str){
Entry::Occupied(entry)=>entry.get().clone(),
Entry::Vacant(entry)=>entry.insert(
function_pointer(lua)?
).clone(),
}
),
None=>None,
})
}
}
pub fn instance_value_store_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut InstanceValueStore)->mlua::Result<T>)->mlua::Result<T>{
let mut cf=lua.app_data_mut::<InstanceValueStore>().ok_or_else(||mlua::Error::runtime("InstanceValueStore missing"))?;
f(&mut *cf)
Ok(None)
}
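
The replacement get_or_create_userdata walks the reflection superclass chain for a (class, property) hit in LAZY_USER_DATA and then caches the constructed userdata on the instance itself, instead of in the old InstanceValueStore side table keyed by Ref. The caching half is the hash-map entry pattern; a sketch with a plain HashMap standing in for instance.userdata and an infallible constructor (the real one returns mlua::Result):

use std::collections::HashMap;
use std::collections::hash_map::Entry;

// Sketch: construct the cached value at most once per key, then hand out clones.
fn get_or_create<V:Clone>(cache:&mut HashMap<&'static str,V>,key:&'static str,create:impl FnOnce()->V)->V{
    match cache.entry(key){
        Entry::Occupied(entry)=>entry.get().clone(),
        Entry::Vacant(entry)=>entry.insert(create()).clone(),
    }
}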

View File

@@ -4,7 +4,7 @@
#[derive(Clone,Copy)]
pub enum Number{
Integer(i64),
Integer(i32),
Number(f64),
}
macro_rules! impl_ty{

View File

@@ -1,4 +1,5 @@
use crate::context::Context;
use crate::util::static_ustr;
#[cfg(feature="run-service")]
use crate::scheduler::scheduler_mut;
@@ -12,12 +13,14 @@ pub enum Error{
error:mlua::Error
},
RustLua(mlua::Error),
Services(crate::context::ServicesError),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
match self{
Self::Lua{source,error}=>write!(f,"lua error: source:\n{source}\n{error}"),
Self::RustLua(error)=>write!(f,"rust-side lua error: {error}"),
other=>write!(f,"{other:?}"),
}
}
}
@@ -48,11 +51,6 @@ fn init(lua:&mlua::Lua)->mlua::Result<()>{
Ok(())
}
unsafe fn extend_lifetime_mut<'a,T>(src:&mut T)->&'a mut T{
let ptr:*mut T=src;
unsafe{&mut*ptr}
}
impl Runner{
pub fn new()->Result<Self,Error>{
let runner=Self{
@@ -70,7 +68,8 @@ impl Runner{
// SAFETY: This is not a &'static mut WeakDom,
// but as long as Runnable<'a> holds the lifetime of &'a mut Context
// it is a valid unique reference.
self.lua.set_app_data::<crate::context::LuaAppData>(unsafe{extend_lifetime_mut(&mut context.dom)});
let ptr=&mut context.dom as *mut rbx_dom_weak::WeakDom;
self.lua.set_app_data::<crate::context::LuaAppData>(unsafe{&mut*ptr});
#[cfg(feature="run-service")]
self.lua.set_app_data::<crate::scheduler::Scheduler>(crate::scheduler::Scheduler::default());
Ok(Runnable{
@@ -129,20 +128,15 @@ impl Runnable<'_>{
}
#[cfg(feature="run-service")]
pub fn run_service_step(&self)->Result<(),mlua::Error>{
let render_stepped=super::instance::instance::dom_mut(&self.lua,|dom|{
let render_stepped_signal=super::instance::instance::dom_mut(&self.lua,|dom|{
let run_service=super::instance::instance::find_first_child_of_class(dom,dom.root(),"RunService").ok_or_else(||mlua::Error::runtime("RunService missing"))?;
super::instance::instance::instance_value_store_mut(&self.lua,|instance_value_store|{
//unwrap because I trust my find_first_child_of_class function to
let mut instance_values=instance_value_store.get_or_create_instance_values(run_service).ok_or_else(||mlua::Error::runtime("RunService InstanceValues missing"))?;
let render_stepped=instance_values.get_or_create_value(&self.lua,"RenderStepped")?;
//let stepped=instance_values.get_or_create_value(&self.lua,"Stepped")?;
//let heartbeat=instance_values.get_or_create_value(&self.lua,"Heartbeat")?;
Ok(render_stepped)
Ok(match run_service.userdata.get(&static_ustr("RenderStepped")){
Some(render_stepped)=>Some(render_stepped.borrow::<super::script_signal::ScriptSignal>()?.clone()),
None=>None
})
})?;
if let Some(render_stepped)=render_stepped{
let signal:&super::script_signal::ScriptSignal=&*render_stepped.borrow()?;
signal.fire(&mlua::MultiValue::new());
if let Some(render_stepped_signal)=render_stepped_signal{
render_stepped_signal.fire(&mlua::MultiValue::new());
}
Ok(())
}
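
Both sides of the run_in_context diff store a &mut WeakDom in Lua app data by laundering it through a raw pointer; the removed extend_lifetime_mut helper only gave the cast a name. The soundness contract is unchanged either way: the erased reference must not outlive the &'a mut Context borrowed by Runnable. A standalone sketch of the pattern with that contract written down (stand-in types, not the crate's):

// Sketch: erase a mutable borrow's lifetime through a raw pointer.
// SAFETY contract: the returned reference must be dropped before the original
// `&mut T` borrow ends; nothing else makes this sound.
unsafe fn erase_lifetime_mut<'long,T>(src:&mut T)->&'long mut T{
    let ptr:*mut T=src;
    unsafe{&mut *ptr}
}

fn main(){
    let mut dom=String::from("WeakDom stand-in");
    {
        // Pretend this is the Lua app data slot holding the erased reference.
        let app_data:&'static mut String=unsafe{erase_lifetime_mut(&mut dom)};
        app_data.push_str(" (mutated through the erased borrow)");
        // `app_data` must not escape this scope.
    }
    println!("{dom}");
}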

View File

@@ -117,7 +117,7 @@ impl mlua::FromLua for ScriptSignal{
}
impl mlua::UserData for ScriptConnection{
fn add_fields<F:UserDataFields<Self>>(fields:&mut F){
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Connected",|_,this|{
Ok(this.position().is_some())
});

View File

@@ -1,7 +1,7 @@
use super::instance::Instance;
use super::tween_info::TweenInfo;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Clone)]
pub struct Tween{
instance:Instance,

View File

@@ -1,7 +1,7 @@
use super::number::Number;
use super::r#enum::{CoerceEnum,Enums};
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Clone)]
pub struct TweenInfo{
time:f64,

View File

@@ -46,8 +46,8 @@ impl Scheduler{
}
}
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut crate::scheduler::Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<crate::scheduler::Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
f(&mut *scheduler)
}

View File

@@ -1,14 +1,11 @@
[package]
name = "strafesnet_snf"
version = "0.3.1"
version = "0.3.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
binrw = "0.15.0"
binrw = "0.14.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }

View File

@@ -6,7 +6,7 @@ use strafesnet_common::physics::Time;
const VERSION:u32=0;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,Time>;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
#[derive(Debug)]
pub enum Error{
@@ -85,7 +85,6 @@ pub struct Segment{
#[derive(Clone,Copy,Debug)]
pub struct SegmentInfo{
/// time of the first instruction in this segment.
#[expect(dead_code)]
time:Time,
instruction_count:u32,
/// How many total instructions in segments up to and including this segment
@@ -117,7 +116,6 @@ impl<R:BinReaderExt> StreamableBot<R>{
segment_map,
})
}
#[expect(dead_code)]
fn get_segment_info(&self,segment_id:SegmentId)->Result<SegmentInfo,Error>{
Ok(*self.segment_map.get(segment_id.get() as usize).ok_or(Error::InvalidSegmentId(segment_id))?)
}
@@ -274,7 +272,7 @@ pub fn write_bot<W:BinWriterExt>(mut writer:W,physics_version:u32,instructions:i
//probe header length
let mut bot_header_data=Vec::new();
header.write_le(&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
binrw::BinWrite::write_le(&header,&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
// the first block location is the map header
block_location.push(offset);

View File

@@ -53,6 +53,8 @@ pub(crate) enum FourCC{
Map,
#[brw(magic=b"SNFB")]
Bot,
#[brw(magic=b"SNFD")]
Demo,
}
#[binrw]
#[brw(little)]

View File

@@ -5,6 +5,7 @@ mod newtypes;
mod file;
pub mod map;
pub mod bot;
pub mod demo;
#[derive(Debug)]
pub enum Error{
@@ -12,6 +13,7 @@ pub enum Error{
Header(file::Error),
Map(map::Error),
Bot(bot::Error),
Demo(demo::Error),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -23,6 +25,7 @@ impl std::error::Error for Error{}
pub enum SNF<R:BinReaderExt>{
Map(map::StreamableMap<R>),
Bot(bot::StreamableBot<R>),
Demo(demo::StreamableDemo<R>),
}
pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
@@ -30,6 +33,7 @@ pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
Ok(match file.fourcc(){
file::FourCC::Map=>SNF::Map(map::StreamableMap::new(file).map_err(Error::Map)?),
file::FourCC::Bot=>SNF::Bot(bot::StreamableBot::new(file).map_err(Error::Bot)?),
file::FourCC::Demo=>SNF::Demo(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
})
}
pub fn read_map<R:BinReaderExt>(input:R)->Result<map::StreamableMap<R>,Error>{
@@ -46,6 +50,13 @@ pub fn read_bot<R:BinReaderExt>(input:R)->Result<bot::StreamableBot<R>,Error>{
_=>Err(Error::UnexpectedFourCC)
}
}
pub fn read_demo<R:BinReaderExt>(input:R)->Result<demo::StreamableDemo<R>,Error>{
let file=file::File::new(input).map_err(Error::Header)?;
match file.fourcc(){
file::FourCC::Demo=>Ok(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
_=>Err(Error::UnexpectedFourCC)
}
}
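
With the Demo FourCC added, read_snf yields a third SNF variant and read_demo rejects anything that is not a demo file. A hypothetical caller, assuming any Read+Seek source satisfies binrw's BinReaderExt (path handling and printing are illustrative):

fn open_demo(path:&std::path::Path)->Result<(),strafesnet_snf::Error>{
    let reader=std::io::BufReader::new(std::fs::File::open(path).expect("open failed"));
    match strafesnet_snf::read_snf(reader)?{
        strafesnet_snf::SNF::Demo(_demo)=>println!("demo stream"),
        strafesnet_snf::SNF::Map(_)=>println!("map, not a demo"),
        strafesnet_snf::SNF::Bot(_)=>println!("bot, not a demo"),
    }
    Ok(())
}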
#[cfg(test)]
mod tests {

View File

@@ -97,8 +97,8 @@ enum ResourceType{
}
struct ResourceMap<T>{
meshes:HashMap<model::MeshId,T>,
textures:HashMap<model::TextureId,T>,
meshes:HashMap<strafesnet_common::model::MeshId,T>,
textures:HashMap<strafesnet_common::model::TextureId,T>,
}
impl<T> Default for ResourceMap<T>{
fn default()->Self{
@@ -185,7 +185,7 @@ pub struct StreamableMap<R:BinReaderExt>{
//this is every possible attribute... need some sort of streaming system
attributes:Vec<strafesnet_common::gameplay_attributes::CollisionAttributes>,
//this is every possible render configuration... shaders and such... need streaming
render_configs:Vec<model::RenderConfig>,
render_configs:Vec<strafesnet_common::model::RenderConfig>,
//this makes sense to keep in memory for streaming, a map of which blocks occupy what space
bvh:BvhNode<BlockId>,
//something something resources hashmaps
@@ -223,7 +223,7 @@ impl<R:BinReaderExt> StreamableMap<R>{
}
Ok(Self{
file,
modes:gameplay_modes::NormalizedModes::new(modes),
modes:strafesnet_common::gameplay_modes::NormalizedModes::new(modes),
attributes,
render_configs,
bvh:strafesnet_common::bvh::generate_bvh(bvh),
@@ -366,12 +366,12 @@ fn collect_spacial_blocks(
block_location.push(sequential_block_data.position());
}else{
match bvh_node.into_content(){
RecursiveContent::Branch(bvh_node_list)=>{
strafesnet_common::bvh::RecursiveContent::Branch(bvh_node_list)=>{
for bvh_node in bvh_node_list{
collect_spacial_blocks(block_location,block_headers,sequential_block_data,bvh_node)?;
}
},
RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
strafesnet_common::bvh::RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
}
}
Ok(())
@@ -384,13 +384,13 @@ pub fn write_map<W:BinWriterExt>(mut writer:W,map:strafesnet_common::map::Comple
let boxen=map.models.into_iter().enumerate().map(|(model_id,model)|{
//grow your own aabb
let mesh=map.meshes.get(model.mesh.get() as usize).ok_or(Error::InvalidMeshId(model.mesh))?;
let mut aabb=Aabb::default();
let mut aabb=strafesnet_common::aabb::Aabb::default();
for &pos in &mesh.unique_pos{
aabb.grow(model.transform.transform_point3(pos).narrow_1().unwrap());
}
Ok(((model::ModelId::new(model_id as u32),model.into()),aabb))
}).collect::<Result<Vec<_>,_>>()?;
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|size_of::<newtypes::model::Model>());
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|std::mem::size_of::<newtypes::model::Model>());
//build blocks
//block location is initialized with two values
//the first value represents the location of the first byte after the file header

View File

@@ -104,7 +104,6 @@ impl From<strafesnet_common::gameplay_style::StyleModifiers> for StyleModifiers{
#[binrw::binrw]
#[brw(little,repr=u8)]
#[expect(dead_code)]
pub enum JumpCalculation{
Max,
BoostThenJump,
@@ -129,7 +128,6 @@ impl From<strafesnet_common::gameplay_style::JumpCalculation> for JumpCalculatio
}
}
#[expect(dead_code)]
pub enum JumpImpulse{
Time(Time),
Height(Planar64),

View File

@@ -20,7 +20,7 @@ impl TryInto<TimedPhysicsInstruction> for TimedInstruction{
}
}
impl TryFrom<TimedPhysicsInstruction> for TimedInstruction{
type Error=InstructionConvert;
type Error=super::physics::InstructionConvert;
fn try_from(value:TimedPhysicsInstruction)->Result<Self,Self::Error>{
Ok(Self{
time:value.time.get(),

View File

@@ -1,6 +1,6 @@
[package]
name = "map-tool"
version = "1.7.2"
version = "1.7.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,16 +12,17 @@ flate2 = "1.0.27"
futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
rbx_asset = { version = "0.5.0", registry = "strafesnet" }
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
lazy-regex = "3.1.0"
rbx_asset = { version = "0.4.4", registry = "strafesnet" }
rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet" }
rbx_reflection_database = "1.0.0"
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.1", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.7.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.1", path = "../lib/snf", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.0", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.6.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.0", path = "../lib/snf", registry = "strafesnet" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
vbsp = "0.9.1"
@@ -35,6 +36,3 @@ vtf = "0.3.0"
#lto = true
#strip = true
#codegen-units = 1
[lints]
workspace = true

View File

@@ -32,12 +32,8 @@ pub struct RobloxToSNFSubcommand {
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
// #[arg(long)]
// cookie_file:Option<String>,
}
impl Commands{
@@ -46,27 +42,13 @@ impl Commands{
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
Commands::DownloadAssets(subcommand)=>download_assets(
subcommand.roblox_files,
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
rbx_asset::cookie::Cookie::new("".to_string()),
).await,
}
}
}
async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<rbx_asset::cookie::Cookie>{
let cookie=match (literal,environment,file){
(Some(cookie_literal),None,None)=>cookie_literal,
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
};
Ok(rbx_asset::cookie::Cookie::new(cookie))
}
#[expect(dead_code)]
#[allow(unused)]
#[derive(Debug)]
enum LoadDomError{
IO(std::io::Error),
@@ -192,7 +174,7 @@ impl UniqueAssets{
}
}
#[expect(dead_code)]
#[allow(unused)]
#[derive(Debug)]
enum UniqueAssetError{
IO(std::io::Error),
@@ -267,8 +249,8 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
tokio::fs::write(path,&data).await?;
break Ok(DownloadResult::Data(data));
},
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::Details{status_code,url_and_body}))=>{
if status_code.as_u16()==429{
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
if scwuab.status_code.as_u16()==429{
if retry==12{
println!("Giving up asset download {asset_id}");
stats.timed_out_downloads+=1;
@@ -280,7 +262,7 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
retry+=1;
}else{
stats.failed_downloads+=1;
println!("weird status_code error: status_code={status_code} url={} body={}",url_and_body.url,url_and_body.body);
println!("weird scuwab error: {scwuab:?}");
break Ok(DownloadResult::Failed);
}
},
@@ -421,7 +403,7 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
}
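
download_retry above treats a 429 response as back-off-and-retry, gives up after a fixed number of attempts, and counts any other error status as a failed download. The policy in isolation, as a hedged synchronous sketch (the closure stands in for the actual rbx_asset request and the sleep between attempts is omitted):

enum Fetch{ Data(Vec<u8>), RateLimited, OtherError }

// Sketch of the retry policy: only rate limiting is retried, up to `max_retries` times.
fn download_with_retry(mut fetch:impl FnMut()->Fetch,max_retries:u32)->Option<Vec<u8>>{
    let mut retry=0;
    loop{
        match fetch(){
            Fetch::Data(data)=>break Some(data),
            Fetch::RateLimited=>{
                if retry==max_retries{
                    break None; // "Giving up asset download"
                }
                retry+=1;
                // the real code waits here before the next attempt
            },
            Fetch::OtherError=>break None, // counted as a failed download
        }
    }
}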
#[derive(Debug)]
#[expect(dead_code)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -434,23 +416,17 @@ impl std::fmt::Display for ConvertError{
}
}
impl std::error::Error for ConvertError{}
struct Errors{
script_errors:Vec<strafesnet_rbx_loader::RunnerError>,
convert_errors:strafesnet_rbx_loader::RecoverableErrors,
}
fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>{
let entire_file=std::fs::read(path).map_err(ConvertError::IO)?;
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let model=strafesnet_rbx_loader::read(
entire_file.as_slice()
std::io::Cursor::new(entire_file)
).map_err(ConvertError::RobloxRead)?;
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap_or_else(|e|vec![e]);
place.run_scripts();
let (map,convert_errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
@@ -459,37 +435,24 @@ fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(Errors{
script_errors,
convert_errors,
})
Ok(())
}
async fn roblox_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf)->AResult<()>{
async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
// This is wrong! Calling roblox_to_snf multiple times keeps adding permits
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::task::spawn_blocking(move||{
let result=convert_to_snf(path.as_path(),output_folder);
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),output_folder).await;
drop(permit);
match result{
Ok(errors)=>{
for error in errors.script_errors{
println!("Script error: {error}");
}
let error_count=errors.convert_errors.count();
if error_count!=0{
println!("Error count: {error_count}");
println!("Errors: {}",errors.convert_errors);
}
},
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
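
The conversion loop above throttles work with a static Semaphore, and the comment flags the bug: every call to roblox_to_snf adds another thread_limit worth of permits, so concurrency grows across calls. A per-call Arc<Semaphore> with owned permits avoids that; a sketch under the assumption that each item is CPU-bound spawn_blocking work (helper name and signature are illustrative):

use std::sync::Arc;
use tokio::sync::Semaphore;

// Sketch: the semaphore lives for one invocation only, so repeated calls cannot stack permits.
async fn for_each_limited<T:Send+'static>(items:Vec<T>,limit:usize,work:impl Fn(T)+Send+Sync+Clone+'static){
    let sem=Arc::new(Semaphore::new(limit));
    let mut handles=Vec::new();
    for item in items{
        let permit=sem.clone().acquire_owned().await.expect("semaphore closed");
        let work=work.clone();
        handles.push(tokio::task::spawn_blocking(move||{
            work(item);
            drop(permit); // release the slot when this item finishes
        }));
    }
    for handle in handles{
        let _=handle.await;
    }
}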

View File

@@ -452,7 +452,7 @@ fn bsp_contents(path:PathBuf)->AResult<()>{
}
#[derive(Debug)]
#[expect(dead_code)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -484,7 +484,7 @@ async fn convert_to_snf(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],output
Ok(())
}
async fn source_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();

View File

@@ -28,12 +28,5 @@ strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet", o
strafesnet_session = { path = "../engine/session", registry = "strafesnet" }
strafesnet_settings = { path = "../engine/settings", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet", optional = true }
wgpu = "27.0.0"
wgpu = "25.0.0"
winit = "0.30.7"
[profile.dev]
strip = false
opt-level = 3
[lints]
workspace = true

View File

@@ -3,7 +3,7 @@ use std::io::Read;
#[cfg(any(feature="roblox",feature="source"))]
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum ReadError{
#[cfg(feature="roblox")]
@@ -63,7 +63,7 @@ pub fn read<R:Read+std::io::Seek>(input:R)->Result<ReadFormat,ReadError>{
}
}
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum LoadError{
ReadError(ReadError),
@@ -98,15 +98,10 @@ pub fn load<P:AsRef<std::path::Path>>(path:P)->Result<LoadFormat,LoadError>{
#[cfg(feature="roblox")]
ReadFormat::Roblox(model)=>{
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap();
for error in script_errors{
println!("Script error: {error}");
}
let (map,errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?;
if errors.count()!=0{
print!("Errors encountered while loading the map:\n{}",errors);
}
Ok(LoadFormat::Map(map))
place.run_scripts();
Ok(LoadFormat::Map(
place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?
))
},
#[cfg(feature="source")]
ReadFormat::Source(bsp)=>Ok(LoadFormat::Map(

View File

@@ -21,7 +21,7 @@ WorkerDescription{
pub fn new(
mut graphics:graphics::GraphicsState,
mut config:wgpu::SurfaceConfiguration,
surface:wgpu::Surface<'_>,
surface:wgpu::Surface,
device:wgpu::Device,
queue:wgpu::Queue,
)->crate::compat_worker::INWorker<'_,Instruction>{

View File

@@ -119,13 +119,12 @@ impl<'a> SetupContextPartial3<'a>{
let (device, queue)=pollster::block_on(self.adapter
.request_device(
&wgpu::DeviceDescriptor{
label:None,
required_features:(optional_features&self.adapter.features())|required_features,
required_limits:needed_limits,
&wgpu::DeviceDescriptor {
label: None,
required_features: (optional_features & self.adapter.features()) | required_features,
required_limits: needed_limits,
memory_hints:wgpu::MemoryHints::Performance,
trace:wgpu::Trace::Off,
experimental_features:wgpu::ExperimentalFeatures::disabled(),
trace: wgpu::Trace::Off,
},
))
.expect("Unable to find a suitable GPU adapter!");

View File

@@ -188,7 +188,7 @@ impl WindowContext<'_>{
}
}
fn device_event(&mut self,time:SessionTime,event:winit::event::DeviceEvent){
fn device_event(&mut self,time:SessionTime,event: winit::event::DeviceEvent){
match event{
winit::event::DeviceEvent::MouseMotion{
delta,

View File

@@ -1 +0,0 @@
mangohud ../target/release/strafe-client bhop_maps/5692098704.snfm "$@"