Compare commits


1 Commit

Author SHA1 Message Date
567ca4b794 idea: multiple collisions can happen in the same instant 2025-08-29 18:32:48 -07:00
18 changed files with 567 additions and 731 deletions

Cargo.lock (generated): 882 changed lines. File diff suppressed because it is too large.

View File

@@ -11,4 +11,4 @@ id = { version = "0.1.0", registry = "strafesnet" }
 strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
 strafesnet_session = { path = "../session", registry = "strafesnet" }
 strafesnet_settings = { path = "../settings", registry = "strafesnet" }
-wgpu = "27.0.0"
+wgpu = "26.0.1"

View File

@@ -23,10 +23,15 @@ use strafesnet_common::physics::{Instruction,MouseInstruction,ModeInstruction,Mi
 //internal influence
 //when the physics asks itself what happens next, this is how it's represented
-#[derive(Debug,Clone)]
+#[derive(Debug)]
 pub enum InternalInstruction{
-    CollisionStart(Collision,model_physics::GigaTime),
-    CollisionEnd(Collision,model_physics::GigaTime),
+    // begin accepting touch updates
+    OpenMultiCollision(model_physics::GigaTime),
+    // mutliple touch updates
+    CollisionStart(Collision),
+    CollisionEnd(Collision),
+    // confirm there will be no more touch updates and apply the transaction
+    CloseMultiCollision,
     StrafeTick,
     ReachWalkTargetVelocity,
     // Water,
@@ -889,9 +894,6 @@ impl PhysicsState{
     pub const fn mode(&self)->gameplay_modes::ModeId{
         self.mode_state.get_mode_id()
     }
-    pub const fn style_mut(&mut self)->&mut StyleModifiers{
-        &mut self.style
-    }
     pub fn get_finish_time(&self)->Option<run::Time>{
         self.run.get_finish_time()
     }
@@ -1126,7 +1128,7 @@ impl InstructionEmitter<InternalInstruction> for PhysicsContext<'_>{
         next_instruction_internal(&self.state,&self.data,time_limit)
     }
 }
-impl<'a> PhysicsContext<'a>{
+impl PhysicsContext<'_>{
     pub fn run_input_instruction(
         state:&mut PhysicsState,
         data:&PhysicsData,
@@ -1136,13 +1138,6 @@ impl<'a> PhysicsContext<'a>{
         context.process_exhaustive(instruction.time);
         context.process_instruction(instruction);
     }
-    pub fn iter_internal(
-        state:&'a mut PhysicsState,
-        data:&'a PhysicsData,
-        time_limit:Time,
-    )->instruction::InstructionIter<InternalInstruction,Time,Self>{
-        PhysicsContext{state,data}.into_iter(time_limit)
-    }
 }
 //this is the one who asks
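
Note: the new InternalInstruction variants split a simultaneous contact into an open/update/close transaction. The sketch below only illustrates how a consumer of these instructions might buffer and apply such a transaction; the placeholder types and the MultiCollision buffer are assumptions for illustration, not the actual strafesnet_physics implementation.

// Illustrative sketch only: placeholder types standing in for the real
// model_physics::GigaTime and Collision types from the diff above.
type GigaTime=i64;
#[derive(Debug,Clone,Copy)]
struct Collision(u32);

#[derive(Debug)]
enum InternalInstruction{
    // begin accepting touch updates
    OpenMultiCollision(GigaTime),
    // multiple touch updates
    CollisionStart(Collision),
    CollisionEnd(Collision),
    // confirm there will be no more touch updates and apply the transaction
    CloseMultiCollision,
}

#[derive(Default)]
struct MultiCollision{
    open:Option<GigaTime>,
    started:Vec<Collision>,
    ended:Vec<Collision>,
}
impl MultiCollision{
    // Buffer touch updates between Open and Close, then apply them all
    // at the same instant instead of one event per instruction.
    fn process(&mut self,ins:InternalInstruction){
        match ins{
            InternalInstruction::OpenMultiCollision(t)=>self.open=Some(t),
            InternalInstruction::CollisionStart(c)=>self.started.push(c),
            InternalInstruction::CollisionEnd(c)=>self.ended.push(c),
            InternalInstruction::CloseMultiCollision=>{
                let t=self.open.take().expect("close without open");
                println!("applying {} starts and {} ends at time {t}",self.started.len(),self.ended.len());
                self.started.clear();
                self.ended.clear();
            },
        }
    }
}

fn main(){
    let mut state=MultiCollision::default();
    for ins in [
        InternalInstruction::OpenMultiCollision(1_000_000_000),
        InternalInstruction::CollisionStart(Collision(0)),
        InternalInstruction::CollisionStart(Collision(1)),
        InternalInstruction::CloseMultiCollision,
    ]{
        state.process(ins);
    }
}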

View File

@@ -1,9 +1,11 @@
-use strafesnet_physics::physics::{InternalInstruction,PhysicsData,PhysicsState,PhysicsContext};
+use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
 use strafesnet_common::gameplay_modes::NormalizedModes;
 use strafesnet_common::gameplay_attributes::{CollisionAttributes,CollisionAttributesId};
 use strafesnet_common::integer::{vec3,mat3,Planar64Affine3,Time};
 use strafesnet_common::model::{Mesh,Model,MeshId,ModelId,RenderConfigId};
 use strafesnet_common::map::CompleteMap;
+use strafesnet_common::physics::Instruction;
+use strafesnet_common::instruction::TimedInstruction;
 use strafesnet_rbx_loader::primitives::{unit_cube,CubeFaceDescription};
 
 struct TestSceneBuilder{
@@ -71,47 +73,19 @@ fn test_scene()->PhysicsData{
 fn simultaneous_collision(){
     let physics_data=test_scene();
     let body=strafesnet_physics::physics::Body::new(
-        (vec3::int(5+2,0,0)>>1)+vec3::int(1,1,0),
+        vec3::int(5+1,1,0),
         vec3::int(-1,-1,0),
         vec3::int(0,0,0),
         Time::ZERO,
     );
     let mut physics=PhysicsState::new_with_body(body);
-    physics.style_mut().gravity=vec3::ZERO;
-    let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(2))
-        .filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
-    // the order that they hit does matter, but we aren't currently worrying about that.
-    // See multi-collision branch
-    assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
-    assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
-    assert!(phys_iter.next().is_none());
+    PhysicsContext::run_input_instruction(&mut physics,&physics_data,TimedInstruction{
+        time:Time::from_secs(2),
+        instruction:Instruction::Idle,
+    });
     let body=physics.body();
     assert_eq!(body.position,vec3::int(5,0,0));
     assert_eq!(body.velocity,vec3::int(0,0,0));
     assert_eq!(body.acceleration,vec3::int(0,0,0));
-    assert_eq!(body.time,Time::from_secs(1));
+    assert_eq!(body.time,Time::ONE_SECOND);
 }
-#[test]
-fn bug_3(){
-    let physics_data=test_scene();
-    let body=strafesnet_physics::physics::Body::new(
-        (vec3::int(5+2,0,0)>>1)+vec3::int(1,2,0),
-        vec3::int(-1,-1,0),
-        vec3::int(0,0,0),
-        Time::ZERO,
-    );
-    let mut physics=PhysicsState::new_with_body(body);
-    physics.style_mut().gravity=vec3::ZERO;
-    let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(3))
-        .filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
-    // touch side of part at 0,0,0
-    assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
-    // touch top of part at 5,-5,0
-    assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(2));
-    assert!(phys_iter.next().is_none());
-    let body=physics.body();
-    assert_eq!(body.position,vec3::int(5+2,0,0)>>1);
-    assert_eq!(body.velocity,vec3::int(0,0,0));
-    assert_eq!(body.acceleration,vec3::int(0,0,0));
-    assert_eq!(body.time,Time::from_secs(2));
-}
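
Note on the kept assertions: the body starts at vec3::int(5+1,1,0) = (6,1,0) with velocity (-1,-1,0) and zero acceleration, so after exactly one second it reaches (5,0,0). Judging by the comments in the removed bug_3 test, that point presumably touches the side of the part at the origin and the top of the part at (5,-5,0) in the same instant, one contact cancelling the x velocity and the other the y velocity. That is why the test expects a resting body at (5,0,0) with body.time equal to one second even though the Idle instruction runs the simulation out to two seconds.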

View File

@@ -61,7 +61,7 @@ pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredL
             _=>None,
         }
     })
-}).filter_map(|[v1,v2,v3]|{
+}).flat_map(|[v1,v2,v3]|{
     // this should probably be a fatal error :D
     let v1=model_vertices.get(v1)?;
     let v2=model_vertices.get(v2)?;

View File

@@ -61,11 +61,11 @@ impl Aabb{
     pub fn center(&self)->Planar64Vec3{
         self.min.map_zip(self.max,|(min,max)|min.midpoint(max))
     }
-    #[inline]
-    pub fn area_weight(&self)->fixed_wide::fixed::Fixed<2,64>{
-        let d=self.max-self.min;
-        d.x*d.y+d.y*d.z+d.z*d.x
-    }
+    //probably use floats for area & volume because we don't care about precision
+    // pub fn area_weight(&self)->f32{
+    //     let d=self.max-self.min;
+    //     d.x*d.y+d.y*d.z+d.z*d.x
+    // }
     // pub fn volume(&self)->f32{
     //     let d=self.max-self.min;
     //     d.x*d.y*d.z
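
Note: the area_weight removed here is half the surface area of the box, i.e. the quantity a surface-area heuristic would minimize when choosing a split. A float version, along the lines the surviving comment suggests, might look like the sketch below (the [f32;3] fields are assumptions for illustration; the real Aabb stores fixed-point Planar64 vectors).

// Sketch with assumed f32 fields; not the real Aabb type.
struct Aabb{min:[f32;3],max:[f32;3]}
impl Aabb{
    // Half the surface area: d.x*d.y + d.y*d.z + d.z*d.x
    fn area_weight(&self)->f32{
        let d=[self.max[0]-self.min[0],self.max[1]-self.min[1],self.max[2]-self.min[2]];
        d[0]*d[1]+d[1]*d[2]+d[2]*d[0]
    }
}

fn main(){
    let unit=Aabb{min:[0.0;3],max:[1.0;3]};
    assert_eq!(unit.area_weight(),3.0);
}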

View File

@@ -245,19 +245,18 @@ pub fn generate_bvh<T>(boxen:Vec<(T,Aabb)>)->BvhNode<T>{
 fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
     let n=boxen.len();
-    const MAX_TERMINAL_BRANCH_LEAF_NODES:usize=20;
-    if force||n<MAX_TERMINAL_BRANCH_LEAF_NODES{
-        let mut aabb_outer=Aabb::default();
-        let nodes=boxen.into_iter().map(|(data,aabb)|{
-            aabb_outer.join(&aabb);
+    if force||n<20{
+        let mut aabb=Aabb::default();
+        let nodes=boxen.into_iter().map(|b|{
+            aabb.join(&b.1);
             BvhNode{
-                content:RecursiveContent::Leaf(data),
-                aabb,
+                content:RecursiveContent::Leaf(b.0),
+                aabb:b.1,
             }
         }).collect();
         BvhNode{
             content:RecursiveContent::Branch(nodes),
-            aabb:aabb_outer,
+            aabb,
         }
     }else{
         let mut sort_x=Vec::with_capacity(n);
@@ -272,64 +271,62 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
         sort_x.sort_by_key(|&(_,c)|c);
         sort_y.sort_by_key(|&(_,c)|c);
         sort_z.sort_by_key(|&(_,c)|c);
-        let mut reverse_acumulated_aabbs=vec![fixed_wide::fixed::Fixed::ZERO;n];
-        fn get_min_area<T>(
-            boxen:&[(T,Aabb)],
-            sorted_list:&[(usize,Planar64)],
-            reverse_acumulated_aabbs:&mut Vec<fixed_wide::fixed::Fixed<2,64>>,
-            best_area:&mut fixed_wide::fixed::Fixed<2,64>,
-        )->Option<usize>{
-            let mut accumulated_aabb=Aabb::default();
-            // create an array of aabbs which accumulates box aabbs from the end of the list
-            for (i,&(index,_)) in sorted_list.iter().enumerate().rev(){
-                accumulated_aabb.join(&boxen[index].1);
-                reverse_acumulated_aabbs[i]=accumulated_aabb.area_weight();
-            }
-            // iterate the list forwards and calculate the total area
-            // if the boxes were split at this index
-            accumulated_aabb=Aabb::default();
-            let mut best_index=None;
-            for (i,&(index,_)) in sorted_list.iter().enumerate(){
-                accumulated_aabb.join(&boxen[index].1);
-                let area=accumulated_aabb.area_weight()+reverse_acumulated_aabbs[i];
-                if area<*best_area{
-                    *best_area=area;
-                    best_index=Some(i);
-                }
-            }
-            best_index
-        }
-        let mut best_area=fixed_wide::fixed::Fixed::MAX;
-        let mut best_index=0;
-        let mut best_list=Vec::new();
-        if let Some(index_x)=get_min_area(&boxen,&sort_x,&mut reverse_acumulated_aabbs,&mut best_area){
-            best_index=index_x;
-            best_list=sort_x;
-        }
-        if let Some(index_y)=get_min_area(&boxen,&sort_y,&mut reverse_acumulated_aabbs,&mut best_area){
-            best_index=index_y;
-            best_list=sort_y;
-        }
-        if let Some(index_z)=get_min_area(&boxen,&sort_z,&mut reverse_acumulated_aabbs,&mut best_area){
-            best_index=index_z;
-            best_list=sort_z;
-        }
-        // need to split boxen into two according to best_list and best_index
-        let mut second=Vec::with_capacity(best_index);
-        boxen.retain(|i|);
-        let second=best_list.split_off(best_index);
-        let mut aabb=Aabb::default();
-        BvhNode{
-            content:RecursiveContent::Branch(
-                [best_list,second].map(|b|{
-                    let node=generate_bvh_node(b,false);
-                    aabb.join(&node.aabb);
-                    node
-                }).collect()
-            ),
-            aabb,
-        }
+        let h=n/2;
+        let median_x=sort_x[h].1;
+        let median_y=sort_y[h].1;
+        let median_z=sort_z[h].1;
+        //locate a run of values equal to the median
+        //partition point gives the first index for which the predicate evaluates to false
+        let first_index_eq_median_x=sort_x.partition_point(|&(_,x)|x<median_x);
+        let first_index_eq_median_y=sort_y.partition_point(|&(_,y)|y<median_y);
+        let first_index_eq_median_z=sort_z.partition_point(|&(_,z)|z<median_z);
+        let first_index_gt_median_x=sort_x.partition_point(|&(_,x)|x<=median_x);
+        let first_index_gt_median_y=sort_y.partition_point(|&(_,y)|y<=median_y);
+        let first_index_gt_median_z=sort_z.partition_point(|&(_,z)|z<=median_z);
+        //pick which side median value copies go into such that both sides are as balanced as possible based on distance from n/2
+        let partition_point_x=if n.abs_diff(2*first_index_eq_median_x)<n.abs_diff(2*first_index_gt_median_x){first_index_eq_median_x}else{first_index_gt_median_x};
+        let partition_point_y=if n.abs_diff(2*first_index_eq_median_y)<n.abs_diff(2*first_index_gt_median_y){first_index_eq_median_y}else{first_index_gt_median_y};
+        let partition_point_z=if n.abs_diff(2*first_index_eq_median_z)<n.abs_diff(2*first_index_gt_median_z){first_index_eq_median_z}else{first_index_gt_median_z};
+        //this ids which octant the boxen is put in
+        let mut octant=vec![0;n];
+        for &(i,_) in &sort_x[partition_point_x..]{
+            octant[i]+=1<<0;
+        }
+        for &(i,_) in &sort_y[partition_point_y..]{
+            octant[i]+=1<<1;
+        }
+        for &(i,_) in &sort_z[partition_point_z..]{
+            octant[i]+=1<<2;
+        }
+        //generate lists for unique octant values
+        let mut list_list=Vec::with_capacity(8);
+        let mut octant_list=Vec::with_capacity(8);
+        for (i,(data,aabb)) in boxen.into_iter().enumerate(){
+            let octant_id=octant[i];
+            let list_id=if let Some(list_id)=octant_list.iter().position(|&id|id==octant_id){
+                list_id
+            }else{
+                let list_id=list_list.len();
+                octant_list.push(octant_id);
+                list_list.push(Vec::new());
+                list_id
+            };
+            list_list[list_id].push((data,aabb));
+        }
+        let mut aabb=Aabb::default();
+        if list_list.len()==1{
+            generate_bvh_node(list_list.remove(0),true)
+        }else{
+            BvhNode{
+                content:RecursiveContent::Branch(
+                    list_list.into_iter().map(|b|{
+                        let node=generate_bvh_node(b,false);
+                        aabb.join(&node.aabb);
+                        node
+                    }).collect()
+                ),
+                aabb,
+            }
+        }
     }
 }
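
Note: the replacement splitter above partitions by the per-axis medians instead of scanning for a minimum-area split. Every box gets a 3-bit octant id (one bit per axis, set when it falls past that axis's partition point), boxes are grouped by octant id, and each non-empty group recurses; if everything lands in one group a leaf is forced. The stand-alone sketch below shows the octant-id idea with plain f32 centroids; it omits the fixed-point sort keys and the tie balancing around the median that the real code performs.

// Simplified octant split over (data, centroid) pairs; illustrative only.
fn octant_split<T>(boxen:Vec<(T,[f32;3])>)->Vec<Vec<(T,[f32;3])>>{
    let n=boxen.len();
    // median centroid along each axis
    let mut medians=[0.0f32;3];
    for axis in 0..3{
        let mut c:Vec<f32>=boxen.iter().map(|(_,p)|p[axis]).collect();
        c.sort_by(f32::total_cmp);
        medians[axis]=c[n/2];
    }
    // 3-bit octant id: bit k set when the centroid is past the median on axis k
    let mut lists:Vec<Vec<(T,[f32;3])>>=(0..8).map(|_|Vec::new()).collect();
    for (data,p) in boxen{
        let mut octant=0usize;
        for axis in 0..3{
            if medians[axis]<p[axis]{
                octant|=1<<axis;
            }
        }
        lists[octant].push((data,p));
    }
    // drop empty octants; the real code forces a leaf when everything lands in one octant
    lists.retain(|list|!list.is_empty());
    lists
}

fn main(){
    let boxen=vec![(0,[0.0,0.0,0.0]),(1,[1.0,1.0,1.0]),(2,[1.0,0.0,0.0]),(3,[0.0,1.0,1.0])];
    println!("{} non-empty octants",octant_split(boxen).len());
}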

View File

@@ -34,41 +34,12 @@ pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsu
         self.process_instruction(instruction);
         }
     }
-    #[inline]
-    fn into_iter(self,time_limit:T)->InstructionIter<I,T,Self>
-        where
-            Self:Sized
-    {
-        InstructionIter{
-            time_limit,
-            feedback:self,
-            _phantom:core::marker::PhantomData,
-        }
-    }
 }
-impl<I,T,F> InstructionFeedback<I,T> for F
+impl<I,T,X> InstructionFeedback<I,T> for X
     where
         T:Copy,
-        F:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
+        X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
     {}
-pub struct InstructionIter<I,T:Copy,F:InstructionFeedback<I,T>>{
-    time_limit:T,
-    feedback:F,
-    _phantom:core::marker::PhantomData<I>,
-}
-impl<I,T,F> Iterator for InstructionIter<I,T,F>
-    where
-        I:Clone,
-        T:Clone+Copy,
-        F:InstructionFeedback<I,T>,
-{
-    type Item=TimedInstruction<I,T>;
-    fn next(&mut self)->Option<Self::Item>{
-        let instruction=self.feedback.next_instruction(self.time_limit)?;
-        self.feedback.process_instruction(instruction.clone());
-        Some(instruction)
-    }
-}
 //PROPER PRIVATE FIELDS!!!
 pub struct InstructionCollector<I,T>{
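
Note: the InstructionIter removed on the new side wraps the emit/process feedback loop in a standard Iterator, which is what the deleted iter_internal test helper relied on. The sketch below reproduces that shape with a simplified trait and a toy emitter; it is not the strafesnet_common API.

// Reduced model of the feedback loop: an emitter proposes the next timed
// instruction and the consumer immediately applies it.
#[derive(Clone,Debug)]
struct TimedInstruction<I>{time:u64,instruction:I}

trait InstructionFeedback<I>{
    fn next_instruction(&self,time_limit:u64)->Option<TimedInstruction<I>>;
    fn process_instruction(&mut self,instruction:TimedInstruction<I>);
}

struct InstructionIter<I,F>{
    time_limit:u64,
    feedback:F,
    _phantom:core::marker::PhantomData<I>,
}
impl<I:Clone,F:InstructionFeedback<I>> Iterator for InstructionIter<I,F>{
    type Item=TimedInstruction<I>;
    fn next(&mut self)->Option<Self::Item>{
        // pull the next instruction, apply it, and hand a copy to the caller
        let instruction=self.feedback.next_instruction(self.time_limit)?;
        self.feedback.process_instruction(instruction.clone());
        Some(instruction)
    }
}

// Toy emitter: ticks once per second until the time limit.
struct Ticker{time:u64}
impl InstructionFeedback<&'static str> for Ticker{
    fn next_instruction(&self,time_limit:u64)->Option<TimedInstruction<&'static str>>{
        (self.time+1<=time_limit).then(||TimedInstruction{time:self.time+1,instruction:"tick"})
    }
    fn process_instruction(&mut self,instruction:TimedInstruction<&'static str>){
        self.time=instruction.time;
    }
}

fn main(){
    let iter=InstructionIter{time_limit:3,feedback:Ticker{time:0},_phantom:core::marker::PhantomData};
    assert_eq!(iter.count(),3);
}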

View File

@@ -13,12 +13,12 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
 bytemuck = "1.14.3"
 glam = "0.30.0"
 lazy-regex = "3.1.0"
-rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
-rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
+rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
+rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
 rbx_mesh = "0.5.0"
 rbx_reflection = "5.0.0"
 rbx_reflection_database = "1.0.0"
-rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
+rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
 rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
 roblox_emulator = { version = "0.5.1", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
 strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }

View File

@@ -9,6 +9,7 @@ use crate::loader::MeshWithSize;
 #[derive(Debug)]
 pub enum Error{
+    Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
     RbxMesh(rbx_mesh::mesh::Error)
 }
 impl std::fmt::Display for Error{
@@ -21,43 +22,41 @@ impl std::error::Error for Error{}
 fn ingest_vertices2(
     vertices:Vec<Vertex2>,
     mb:&mut model::MeshBuilder,
-)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
+)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>{
     //this monster is collecting a map of old_vertices_index -> unique_vertices_index
     //while also doing the inserting unique entries into lists simultaneously
-    // vertex positions that fail to convert are DROPPED
-    vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
+    Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
         rbx_mesh::mesh::VertexId2(vertex_id as u32),
         {
             let vertex=IndexedVertex{
-                pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
+                pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?),
                 tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
-                normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
-                color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32))),
+                normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?),
+                color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)))
             };
             mb.acquire_vertex_id(vertex)
         }
-    ))).collect()
+    ))).collect::<Result<_,_>>().map_err(Error::Planar64Vec3)?)
 }
 fn ingest_vertices_truncated2(
     vertices:Vec<Vertex2Truncated>,
     mb:&mut model::MeshBuilder,
     static_color_id:ColorId,//pick one color and fill everything with it
-)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
+)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>{
     //this monster is collecting a map of old_vertices_index -> unique_vertices_index
     //while also doing the inserting unique entries into lists simultaneously
-    // vertex positions that fail to convert are DROPPED
-    vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
+    Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
         rbx_mesh::mesh::VertexId2(vertex_id as u32),
         {
             let vertex=IndexedVertex{
-                pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
+                pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?),
                 tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
-                normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
+                normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?),
                 color:static_color_id,
             };
             mb.acquire_vertex_id(vertex)
         }
-    ))).collect()
+    ))).collect::<Result<_,_>>().map_err(Error::Planar64Vec3)?)
 }
 fn ingest_faces2_lods3(
@@ -68,8 +67,8 @@ fn ingest_faces2_lods3(
 ){
     //faces have to be split into polygon groups based on lod
     polygon_groups.extend(lods.windows(2).map(|lod_pair|
-        PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().filter_map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
-            Some(vec![*vertex_id_map.get(&v0)?,*vertex_id_map.get(&v1)?,*vertex_id_map.get(&v2)?])
+        PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
+            vec![vertex_id_map[&v0],vertex_id_map[&v1],vertex_id_map[&v2]]
         ).collect()))
     ))
 }
@@ -81,18 +80,18 @@ pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithS
     match rbx_mesh::read_versioned(roblox_mesh_bytes.cursor()).map_err(Error::RbxMesh)?{
         rbx_mesh::mesh::Mesh::V1(mesh)=>{
             let color_id=mb.acquire_color_id(glam::Vec4::ONE);
-            polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).filter_map(|trip|{
+            polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).map(|trip|{
                 let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|{
                     let vertex=IndexedVertex{
-                        pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
+                        pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?),
                         tex:mb.acquire_tex_id(glam::vec2(vertex.tex[0],vertex.tex[1])),
-                        normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
+                        normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?),
                         color:color_id,
                     };
-                    Some(mb.acquire_vertex_id(vertex))
+                    Ok(mb.acquire_vertex_id(vertex))
                 };
-                Some(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
-            }).collect())));
+                Ok(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
+            }).collect::<Result<_,_>>().map_err(Error::Planar64Vec3)?)));
         },
         rbx_mesh::mesh::Mesh::V2(mesh)=>{
             let vertex_id_map=match mesh.header.sizeof_vertex{
@@ -102,10 +101,10 @@ pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithS
                     ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
                 },
                 rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
-            };
+            }?;
             //one big happy group for all the faces
-            polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().filter_map(|face|
-                Some(vec![*vertex_id_map.get(&face.0)?,*vertex_id_map.get(&face.1)?,*vertex_id_map.get(&face.2)?])
+            polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|face|
+                vec![vertex_id_map[&face.0],vertex_id_map[&face.1],vertex_id_map[&face.2]]
             ).collect())));
         },
         rbx_mesh::mesh::Mesh::V3(mesh)=>{
@@ -115,15 +114,15 @@ pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithS
                     ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
                 },
                 rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
-            };
+            }?;
             ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
         },
         rbx_mesh::mesh::Mesh::V4(mesh)=>{
-            let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
+            let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb)?;
             ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
         },
         rbx_mesh::mesh::Mesh::V5(mesh)=>{
-            let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
+            let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb)?;
             ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
         },
     }
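
Note: every change in this loader follows the same refactor: instead of silently dropping vertices whose float coordinates fail to convert (filter_map over Option), the closures return Result and the whole collect short-circuits via collect::<Result<_,_>>(), surfacing the new Error::Planar64Vec3 variant. A minimal stand-alone illustration of the before/after shape, where convert() is a made-up stand-in for vec3::try_from_f32_array:

// Stand-in for the Planar64 conversion: reject non-finite floats.
#[derive(Debug)]
struct TryFromFloatError(f32);

fn convert(x:f32)->Result<i64,TryFromFloatError>{
    if x.is_finite(){Ok((x*4096.0) as i64)}else{Err(TryFromFloatError(x))}
}

// Before: failures are dropped, so a bad vertex silently disappears.
fn convert_lossy(vertices:&[f32])->Vec<i64>{
    vertices.iter().filter_map(|&x|convert(x).ok()).collect()
}

// After: the first failure aborts the whole conversion with an error.
fn convert_strict(vertices:&[f32])->Result<Vec<i64>,TryFromFloatError>{
    vertices.iter().map(|&x|convert(x)).collect::<Result<_,_>>()
}

fn main(){
    let vertices=[1.0,2.5,f32::NAN];
    assert_eq!(convert_lossy(&vertices).len(),2);
    assert!(convert_strict(&vertices).is_err());
}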

View File

@@ -13,9 +13,9 @@ run-service=[]
 [dependencies]
 glam = "0.30.0"
-mlua = { version = "0.11.3", features = ["luau"] }
-phf = { version = "0.13.1", features = ["macros"] }
-rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
+mlua = { version = "0.10.1", features = ["luau"] }
+phf = { version = "0.12.1", features = ["macros"] }
+rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
 rbx_reflection = "5.0.0"
 rbx_reflection_database = "1.0.0"
 rbx_types = "2.0.0"

View File

@@ -65,7 +65,7 @@ impl<'a> EnumItems<'a>{
     }
 }
 
 pub enum CoerceEnum<'a>{
-    Integer(i64),
+    Integer(i32),
     String(mlua::String),
     Enum(EnumItem<'a>),
 }

View File

@@ -11,7 +11,6 @@ use crate::runner::number::Number;
 pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
     //class functions store
     lua.set_app_data(ClassMethodsStore::default());
-    lua.set_app_data(InstanceValueStore::default());
 
     let table=lua.create_table()?;
@@ -326,16 +325,13 @@ impl mlua::UserData for Instance{
         }
         //find or create an associated userdata object
-        if let Some(value)=instance_value_store_mut(lua,|ivs|{
-            //TODO: walk class tree somehow
-            match ivs.get_or_create_instance_values(&instance){
-                Some(mut instance_values)=>instance_values.get_or_create_value(lua,index_str),
-                None=>Ok(None)
-            }
-        })?{
+        let instance=this.get_mut(dom)?;
+        if let Some(value)=get_or_create_userdata(instance,lua,index_str)?{
             return value.into_lua(lua);
         }
+        // drop mutable borrow
         //find a child with a matching name
+        let instance=this.get(dom)?;
         find_first_child(dom,instance,index_str)
             .map(|instance|Instance::new_unchecked(instance.referent()))
             .into_lua(lua)
@@ -423,7 +419,7 @@ impl mlua::UserData for Instance{
                     rbx_types::Variant::CFrame(typed_value.clone().into())
                 },
                 rbx_reflection::DataType::Value(rbx_types::VariantType::ContentId)=>{
-                    let typed_value=value.as_string().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_str()?.to_owned();
+                    let typed_value=value.as_str().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_owned();
                     rbx_types::Variant::ContentId(typed_value.into())
                 },
                 rbx_reflection::DataType::Value(rbx_types::VariantType::Ref)=>{
@@ -610,8 +606,6 @@ fn find_virtual_property(
 }
 
-// lazy-loaded per-instance userdata values
 // This whole thing is a bad idea and a garbage collection nightmare.
-// TODO: recreate rbx_dom_weak with my own instance type that owns this data.
 type CreateUserData=fn(&mlua::Lua)->mlua::Result<mlua::AnyUserData>;
 type LUD=phf::Map<&'static str,// Class name
     phf::Map<&'static str,// Value name
@@ -644,47 +638,22 @@ static LAZY_USER_DATA:LUD=phf::phf_map!{
         "MouseClick"=>create_script_signal,
     },
 };
-#[derive(Default)]
-pub struct InstanceValueStore{
-    values:HashMap<Ref,
-        HashMap<&'static str,
-            mlua::AnyUserData
-        >
-    >,
-}
-pub struct InstanceValues<'a>{
-    named_values:&'static phf::Map<&'static str,CreateUserData>,
-    values:&'a mut HashMap<&'static str,mlua::AnyUserData>,
-}
-impl InstanceValueStore{
-    pub fn get_or_create_instance_values(&mut self,instance:&rbx_dom_weak::Instance)->Option<InstanceValues<'_>>{
-        LAZY_USER_DATA.get(instance.class.as_str())
-            .map(|named_values|
-                InstanceValues{
-                    named_values,
-                    values:self.values.entry(instance.referent())
-                        .or_insert_with(||HashMap::new()),
-                }
-            )
-    }
-}
-impl InstanceValues<'_>{
-    pub fn get_or_create_value(&mut self,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
-        Ok(match self.named_values.get_entry(index){
-            Some((&static_index_str,&function_pointer))=>Some(
-                match self.values.entry(static_index_str){
-                    Entry::Occupied(entry)=>entry.get().clone(),
-                    Entry::Vacant(entry)=>entry.insert(
-                        function_pointer(lua)?
-                    ).clone(),
-                }
-            ),
-            None=>None,
-        })
-    }
-}
-pub fn instance_value_store_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut InstanceValueStore)->mlua::Result<T>)->mlua::Result<T>{
-    let mut cf=lua.app_data_mut::<InstanceValueStore>().ok_or_else(||mlua::Error::runtime("InstanceValueStore missing"))?;
-    f(&mut *cf)
-}
+fn get_or_create_userdata(instance:&mut rbx_dom_weak::Instance,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
+    use std::collections::hash_map::Entry;
+    let db=rbx_reflection_database::get();
+    let Some(class)=db.classes.get(instance.class.as_str())else{
+        return Ok(None)
+    };
+    if let Some((&static_str,create_userdata))=db.superclasses_iter(class).find_map(|superclass|
+        // find pair (class,index)
+        LAZY_USER_DATA.get(&superclass.name)
+            .and_then(|map|map.get_entry(index))
+    ){
+        let index_ustr=static_ustr(static_str);
+        return Ok(Some(match instance.userdata.entry(index_ustr){
+            Entry::Occupied(entry)=>entry.get().clone(),
+            Entry::Vacant(entry)=>entry.insert(create_userdata(lua)?).clone(),
+        }));
+    }
+    Ok(None)
+}
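
Note: the replacement get_or_create_userdata walks the reflection superclass chain for a (class, property) constructor and caches the created userdata on the instance itself through the Entry API, replacing the app-data InstanceValueStore. The sketch below shows the same lookup-then-cache shape with plain HashMaps standing in for the phf tables, rbx_reflection_database, and the per-instance ustr-keyed storage; all names here are hypothetical.

use std::collections::HashMap;
use std::collections::hash_map::Entry;

// Hypothetical stand-ins: a class -> superclass table and a (class, property) -> constructor table.
type Constructor=fn()->String;

fn get_or_create(
    superclass_of:&HashMap<String,String>,
    lazy:&HashMap<(String,String),Constructor>,
    cache:&mut HashMap<String,String>,
    class:&str,
    index:&str,
)->Option<String>{
    // walk the superclass chain until some class defines this lazily created value
    let mut class=class.to_owned();
    let constructor=loop{
        if let Some(constructor)=lazy.get(&(class.clone(),index.to_owned())){
            break constructor;
        }
        class=superclass_of.get(&class)?.clone();
    };
    // create the value once and reuse the cached copy afterwards
    Some(match cache.entry(index.to_owned()){
        Entry::Occupied(entry)=>entry.get().clone(),
        Entry::Vacant(entry)=>entry.insert(constructor()).clone(),
    })
}

fn main(){
    let superclass_of=HashMap::from([("RunService".to_owned(),"Instance".to_owned())]);
    let lazy:HashMap<(String,String),Constructor>=HashMap::from([
        (("RunService".to_owned(),"RenderStepped".to_owned()),(||"signal".to_owned()) as Constructor),
    ]);
    let mut cache=HashMap::new();
    let first=get_or_create(&superclass_of,&lazy,&mut cache,"RunService","RenderStepped");
    assert_eq!(first.as_deref(),Some("signal"));
    assert!(get_or_create(&superclass_of,&lazy,&mut cache,"RunService","Heartbeat").is_none());
}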

View File

@@ -4,7 +4,7 @@
 #[derive(Clone,Copy)]
 pub enum Number{
-    Integer(i64),
+    Integer(i32),
     Number(f64),
 }
 macro_rules! impl_ty{

View File

@@ -1,4 +1,5 @@
 use crate::context::Context;
+use crate::util::static_ustr;
 #[cfg(feature="run-service")]
 use crate::scheduler::scheduler_mut;
@@ -125,20 +126,15 @@ impl Runnable<'_>{
     }
     #[cfg(feature="run-service")]
     pub fn run_service_step(&self)->Result<(),mlua::Error>{
-        let render_stepped=super::instance::instance::dom_mut(&self.lua,|dom|{
+        let render_stepped_signal=super::instance::instance::dom_mut(&self.lua,|dom|{
             let run_service=super::instance::instance::find_first_child_of_class(dom,dom.root(),"RunService").ok_or_else(||mlua::Error::runtime("RunService missing"))?;
-            super::instance::instance::instance_value_store_mut(&self.lua,|instance_value_store|{
-                //unwrap because I trust my find_first_child_of_class function to
-                let mut instance_values=instance_value_store.get_or_create_instance_values(run_service).ok_or_else(||mlua::Error::runtime("RunService InstanceValues missing"))?;
-                let render_stepped=instance_values.get_or_create_value(&self.lua,"RenderStepped")?;
-                //let stepped=instance_values.get_or_create_value(&self.lua,"Stepped")?;
-                //let heartbeat=instance_values.get_or_create_value(&self.lua,"Heartbeat")?;
-                Ok(render_stepped)
+            Ok(match run_service.userdata.get(&static_ustr("RenderStepped")){
+                Some(render_stepped)=>Some(render_stepped.borrow::<super::script_signal::ScriptSignal>()?.clone()),
+                None=>None
             })
         })?;
-        if let Some(render_stepped)=render_stepped{
-            let signal:&super::script_signal::ScriptSignal=&*render_stepped.borrow()?;
-            signal.fire(&mlua::MultiValue::new());
+        if let Some(render_stepped_signal)=render_stepped_signal{
+            render_stepped_signal.fire(&mlua::MultiValue::new());
         }
         Ok(())
     }

View File

@@ -13,11 +13,11 @@ futures = "0.3.31"
 image = "0.25.2"
 image_dds = "0.7.1"
 lazy-regex = "3.1.0"
-rbx_asset = { version = "0.5.0", registry = "strafesnet" }
-rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
-rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
+rbx_asset = { version = "0.4.4", registry = "strafesnet" }
+rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
+rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet" }
 rbx_reflection_database = "1.0.0"
-rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
+rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
 rbxassetid = { version = "0.1.0", registry = "strafesnet" }
 strafesnet_bsp_loader = { version = "0.3.1", path = "../lib/bsp_loader", registry = "strafesnet" }
 strafesnet_deferred_loader = { version = "0.5.1", path = "../lib/deferred_loader", registry = "strafesnet" }

View File

@@ -28,7 +28,7 @@ strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet", o
 strafesnet_session = { path = "../engine/session", registry = "strafesnet" }
 strafesnet_settings = { path = "../engine/settings", registry = "strafesnet" }
 strafesnet_snf = { path = "../lib/snf", registry = "strafesnet", optional = true }
-wgpu = "27.0.0"
+wgpu = "26.0.1"
 winit = "0.30.7"
 
 [profile.dev]

View File

@@ -119,13 +119,12 @@ impl<'a> SetupContextPartial3<'a>{
         let (device, queue)=pollster::block_on(self.adapter
             .request_device(
-                &wgpu::DeviceDescriptor{
-                    label:None,
-                    required_features:(optional_features&self.adapter.features())|required_features,
-                    required_limits:needed_limits,
+                &wgpu::DeviceDescriptor {
+                    label: None,
+                    required_features: (optional_features & self.adapter.features()) | required_features,
+                    required_limits: needed_limits,
                     memory_hints:wgpu::MemoryHints::Performance,
-                    trace:wgpu::Trace::Off,
-                    experimental_features:wgpu::ExperimentalFeatures::disabled(),
+                    trace: wgpu::Trace::Off,
                 },
             ))
             .expect("Unable to find a suitable GPU adapter!");