Compare commits

24 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 6c3f20eb53 | |
| | 15736b60ce | |
| | 661d706a22 | |
| | 5550d5771e | |
| | c834d1d1ca | |
| | ca9d2238a7 | |
| | f3bb8dd067 | |
| | e58f9b9ff2 | |
| | 54c4ed6bad | |
| | 9aceafa0df | |
| | d065bac130 | |
| | a4d0393556 | |
| | 3692d7f79e | |
| | 7e49840768 | |
| | 4ecdd547c6 | |
| | b0365165e8 | |
| | c2ff52a2ae | |
| | 6e778869e8 | |
| | 6509bef070 | |
| | 0fa097a004 | |
| | 55d4b1d264 | |
| | ea28663e95 | |
| | bac9be9684 | |
| | 7e76f3309b | |
.gitignore (vendored): 1 line changed

@@ -1 +1,2 @@
/target
.zed
@@ -1,29 +0,0 @@
// Project-local debug tasks
//
// For more documentation on how to configure debug tasks,
// see: https://zed.dev/docs/debugger
[
{
"label": "Strafe Client",
"adapter": "CodeLLDB",
"program": "target/debug/strafe-client",
"args": [
"tools/bhop_maps/5692113331.snfm"
],
"request": "launch",
"build": {
"command": "cargo",
"args": ["build","-p strafe-client"]
}
},
{
"label": "Integration Testing",
"adapter": "CodeLLDB",
"program": "target/release/integration-testing",
"request": "launch",
"build": {
"command": "cargo",
"args": ["test","-p integration-testing"]
}
}
]
Cargo.lock (generated): 884 lines changed

File diff suppressed because it is too large
@@ -11,4 +11,4 @@ id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_session = { path = "../session", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
wgpu = "26.0.1"
wgpu = "27.0.0"
@@ -23,7 +23,7 @@ use strafesnet_common::physics::{Instruction,MouseInstruction,ModeInstruction,Mi

//internal influence
//when the physics asks itself what happens next, this is how it's represented
#[derive(Debug)]
#[derive(Debug,Clone)]
pub enum InternalInstruction{
CollisionStart(Collision,model_physics::GigaTime),
CollisionEnd(Collision,model_physics::GigaTime),
@@ -874,6 +874,9 @@ impl PhysicsState{
..Self::default()
}
}
pub const fn body(&self)->&Body{
&self.body
}
pub fn camera_body(&self)->Body{
Body{
position:self.body.position+self.style.camera_offset,
@@ -886,6 +889,9 @@ impl PhysicsState{
pub const fn mode(&self)->gameplay_modes::ModeId{
self.mode_state.get_mode_id()
}
pub const fn style_mut(&mut self)->&mut StyleModifiers{
&mut self.style
}
pub fn get_finish_time(&self)->Option<run::Time>{
self.run.get_finish_time()
}
@@ -949,8 +955,8 @@ pub struct PhysicsData{
//cached calculations
hitbox_mesh:HitboxMesh,
}
impl Default for PhysicsData{
fn default()->Self{
impl PhysicsData{
pub fn empty()->Self{
Self{
bvh:bvh::BvhNode::empty(),
models:Default::default(),
@@ -958,47 +964,7 @@ impl Default for PhysicsData{
hitbox_mesh:StyleModifiers::default().calculate_mesh(),
}
}
}
// the collection of information required to run physics
pub struct PhysicsContext<'a>{
state:&'a mut PhysicsState,//this captures the entire state of the physics.
data:&'a PhysicsData,//data currently loaded into memory which is needded for physics to run, but is not part of the state.
}
// the physics consumes both Instruction and PhysicsInternalInstruction,
// but can only emit PhysicsInternalInstruction
impl InstructionConsumer<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,Time>){
atomic_internal_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionConsumer<Instruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Time>){
atomic_input_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionEmitter<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
//this little next instruction function could cache its return value and invalidate the cached value by watching the State.
fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
next_instruction_internal(&self.state,&self.data,time_limit)
}
}
impl PhysicsContext<'_>{
pub fn run_input_instruction(
state:&mut PhysicsState,
data:&PhysicsData,
instruction:TimedInstruction<Instruction,Time>
){
let mut context=PhysicsContext{state,data};
context.process_exhaustive(instruction.time);
context.process_instruction(instruction);
}
}
impl PhysicsData{
/// use with caution, this is the only non-instruction way to mess with physics
pub fn generate_models(&mut self,map:&map::CompleteMap){
pub fn new(map:&map::CompleteMap)->Self{
let modes=map.modes.clone().denormalize();
let mut used_contact_attributes=Vec::new();
let mut used_intersect_attributes=Vec::new();
@@ -1125,11 +1091,57 @@ impl PhysicsData{
(IntersectAttributesId::new(attr_id as u32),attr)
).collect(),
};
self.bvh=bvh;
self.models=models;
self.modes=modes;
//hitbox_mesh is unchanged
println!("Physics Objects: {}",model_count);
Self{
hitbox_mesh:StyleModifiers::default().calculate_mesh(),
bvh,
models,
modes,
}
}
}
// the collection of information required to run physics
pub struct PhysicsContext<'a>{
state:&'a mut PhysicsState,//this captures the entire state of the physics.
data:&'a PhysicsData,//data currently loaded into memory which is needded for physics to run, but is not part of the state.
}
// the physics consumes both Instruction and PhysicsInternalInstruction,
// but can only emit PhysicsInternalInstruction
impl InstructionConsumer<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,Time>){
atomic_internal_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionConsumer<Instruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Time>){
atomic_input_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionEmitter<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
//this little next instruction function could cache its return value and invalidate the cached value by watching the State.
fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
next_instruction_internal(&self.state,&self.data,time_limit)
}
}
impl<'a> PhysicsContext<'a>{
pub fn run_input_instruction(
state:&mut PhysicsState,
data:&PhysicsData,
instruction:TimedInstruction<Instruction,Time>
){
let mut context=PhysicsContext{state,data};
context.process_exhaustive(instruction.time);
context.process_instruction(instruction);
}
pub fn iter_internal(
state:&'a mut PhysicsState,
data:&'a PhysicsData,
time_limit:Time,
)->instruction::InstructionIter<InternalInstruction,Time,Self>{
PhysicsContext{state,data}.into_iter(time_limit)
}
}
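The hunks above change how the physics data is constructed and driven: `PhysicsData` is now built immutably from a map via `PhysicsData::new` (replacing the mutable `generate_models`), `InternalInstruction` derives `Clone`, and `PhysicsContext::iter_internal` exposes the internal instruction stream as an iterator. A minimal sketch of the intended call pattern from a consumer crate, assuming the re-exports that integration-testing uses later in this diff; the helper function name is invented for illustration:

```rust
use strafesnet_physics::physics::{PhysicsContext,PhysicsData,PhysicsState};
use strafesnet_common::integer::Time;
use strafesnet_common::map::CompleteMap;

// Hypothetical helper: build immutable physics data once per map, then drain the
// internal instruction stream up to a time limit and report how many were emitted.
fn count_internal_instructions(map:&CompleteMap)->usize{
	let physics_data=PhysicsData::new(map);
	let mut physics=PhysicsState::default();
	PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(1)).count()
}
```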
@@ -2124,4 +2136,115 @@ mod test{
Time::ZERO
),None);
}
// overlap edges by 1 epsilon
#[test]
fn almost_miss_north(){
test_collision_axis_aligned(Body::new(
(int3(0,10,-7)>>1)+vec3::raw_xyz(0,0,1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
#[test]
fn almost_miss_east(){
test_collision_axis_aligned(Body::new(
(int3(7,10,0)>>1)+vec3::raw_xyz(-1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
#[test]
fn almost_miss_south(){
test_collision_axis_aligned(Body::new(
(int3(0,10,7)>>1)+vec3::raw_xyz(0,0,-1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
#[test]
fn almost_miss_west(){
test_collision_axis_aligned(Body::new(
(int3(-7,10,0)>>1)+vec3::raw_xyz(1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),Some(Time::from_secs(2)))
}
// exactly miss edges
#[test]
fn exact_miss_north(){
test_collision_axis_aligned(Body::new(
int3(0,10,-7)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn exact_miss_east(){
test_collision_axis_aligned(Body::new(
int3(7,10,0)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn exact_miss_south(){
test_collision_axis_aligned(Body::new(
int3(0,10,7)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn exact_miss_west(){
test_collision_axis_aligned(Body::new(
int3(-7,10,0)>>1,
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
// miss edges by 1 epsilon
#[test]
fn narrow_miss_north(){
test_collision_axis_aligned(Body::new(
(int3(0,10,-7)>>1)-vec3::raw_xyz(0,0,1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn narrow_miss_east(){
test_collision_axis_aligned(Body::new(
(int3(7,10,0)>>1)-vec3::raw_xyz(-1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn narrow_miss_south(){
test_collision_axis_aligned(Body::new(
(int3(0,10,7)>>1)-vec3::raw_xyz(0,0,-1),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
#[test]
fn narrow_miss_west(){
test_collision_axis_aligned(Body::new(
(int3(-7,10,0)>>1)-vec3::raw_xyz(1,0,0),
int3(0,-1,0),
vec3::ZERO,
Time::ZERO
),None)
}
}
@@ -172,7 +172,7 @@ impl Session{
user_settings,
directories,
mouse_interpolator:MouseInterpolator::new(),
geometry_shared:Default::default(),
geometry_shared:PhysicsData::empty(),
simulation,
view_state:ViewState::Play,
recording:Default::default(),
@@ -184,7 +184,7 @@ impl Session{
}
fn change_map(&mut self,map:&strafesnet_common::map::CompleteMap){
self.simulation.physics.clear();
self.geometry_shared.generate_models(map);
self.geometry_shared=PhysicsData::new(map);
}
pub fn get_frame_state(&self,time:SessionTime)->Option<FrameState>{
match &self.view_state{
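For context, a minimal sketch of the lifecycle these two hunks imply, with the real `Session` reduced to the one field shown here; everything else about `Session` is an assumption:

```rust
use strafesnet_physics::physics::PhysicsData;
use strafesnet_common::map::CompleteMap;

// Reduced stand-in for Session: it starts with empty physics data and swaps in a
// freshly built PhysicsData when the map changes, instead of mutating it in place
// with the old generate_models.
struct SessionGeometry{
	geometry_shared:PhysicsData,
}
impl SessionGeometry{
	fn new()->Self{
		Self{geometry_shared:PhysicsData::empty()}
	}
	fn change_map(&mut self,map:&CompleteMap){
		self.geometry_shared=PhysicsData::new(map);
	}
}
```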
@@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"

[dependencies]
glam = "0.30.0"
strafesnet_common = { path = "../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../engine/physics", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet" }
# this is just for the primitive constructor
strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet" }

@@ -3,6 +3,8 @@ mod util;

#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_scenes;

use std::time::Instant;

@@ -29,9 +31,8 @@ fn run_replay()->Result<(),ReplayError>{
let bot=strafesnet_snf::read_bot(data)?.read_all()?;

// create recording
let mut physics_data=PhysicsData::default();
println!("generating models..");
physics_data.generate_models(&map);
let physics_data=PhysicsData::new(&map);
println!("simulating...");
let mut physics=PhysicsState::default();
for ins in bot.instructions{
@@ -139,9 +140,8 @@ fn test_determinism()->Result<(),ReplayError>{
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;

let mut physics_data=PhysicsData::default();
println!("generating models..");
physics_data.generate_models(&map);
let physics_data=PhysicsData::new(&map);

let (send,recv)=std::sync::mpsc::channel();
integration-testing/src/test_scenes.rs (new file): 117 lines

@@ -0,0 +1,117 @@
use strafesnet_physics::physics::{InternalInstruction,PhysicsData,PhysicsState,PhysicsContext};
use strafesnet_common::gameplay_modes::NormalizedModes;
use strafesnet_common::gameplay_attributes::{CollisionAttributes,CollisionAttributesId};
use strafesnet_common::integer::{vec3,mat3,Planar64Affine3,Time};
use strafesnet_common::model::{Mesh,Model,MeshId,ModelId,RenderConfigId};
use strafesnet_common::map::CompleteMap;
use strafesnet_rbx_loader::primitives::{unit_cube,CubeFaceDescription};

struct TestSceneBuilder{
meshes:Vec<Mesh>,
models:Vec<Model>,
}
impl TestSceneBuilder{
fn new()->Self{
Self{
meshes:Vec::new(),
models:Vec::new(),
}
}
fn push_mesh(&mut self,mesh:Mesh)->MeshId{
let mesh_id=self.meshes.len();
self.meshes.push(mesh);
MeshId::new(mesh_id as u32)
}
fn push_mesh_instance(&mut self,mesh:MeshId,transform:Planar64Affine3)->ModelId{
let model=Model{
mesh,
attributes:CollisionAttributesId::new(0),
color:glam::Vec4::ONE,
transform,
};
let model_id=self.models.len();
self.models.push(model);
ModelId::new(model_id as u32)
}
fn build(self)->PhysicsData{
let modes=NormalizedModes::new(Vec::new());
let attributes=vec![CollisionAttributes::contact_default()];
let meshes=self.meshes;
let models=self.models;
let textures=Vec::new();
let render_configs=Vec::new();
PhysicsData::new(&CompleteMap{
modes,
attributes,
meshes,
models,
textures,
render_configs,
})
}
}

fn test_scene()->PhysicsData{
let mut builder=TestSceneBuilder::new();
let cube_face_description=CubeFaceDescription::new(Default::default(),RenderConfigId::new(0));
let mesh=builder.push_mesh(unit_cube(cube_face_description));
// place two 5x5x5 cubes.
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(0,0,0)
));
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(5,-5,0)
));
builder.build()
}

#[test]
fn simultaneous_collision(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,1,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::ZERO;
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(2))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// the order that they hit does matter, but we aren't currently worrying about that.
// See multi-collision branch
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5,0,0));
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(1));
}
#[test]
fn bug_3(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,2,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::ZERO;
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(3))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// touch side of part at 0,0,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
// touch top of part at 5,-5,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(2));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5+2,0,0)>>1);
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(2));
}
@@ -10,9 +10,8 @@ fn physics_bug_2()->Result<(),ReplayError>{
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;

// create recording
let mut physics_data=PhysicsData::default();
println!("generating models..");
physics_data.generate_models(&map);
let physics_data=PhysicsData::new(&map);
println!("simulating...");

//teleport to bug
@@ -45,9 +44,8 @@ fn physics_bug_3()->Result<(),ReplayError>{
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;

// create recording
let mut physics_data=PhysicsData::default();
println!("generating models..");
physics_data.generate_models(&map);
let physics_data=PhysicsData::new(&map);
println!("simulating...");

//teleport to bug
@@ -61,7 +61,7 @@ pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredL
_=>None,
}
})
}).flat_map(|[v1,v2,v3]|{
}).filter_map(|[v1,v2,v3]|{
// this should probably be a fatal error :D
let v1=model_vertices.get(v1)?;
let v2=model_vertices.get(v2)?;
@@ -2,7 +2,9 @@ use crate::integer::{vec3,Planar64Vec3};

#[derive(Clone)]
pub struct Aabb{
// min is inclusive
min:Planar64Vec3,
// max is not inclusive
max:Planar64Vec3,
}

@@ -43,7 +45,7 @@ impl Aabb{
}
#[inline]
pub fn contains(&self,point:Planar64Vec3)->bool{
let bvec=self.min.lt(point)&point.lt(self.max);
let bvec=self.min.le(point)&point.lt(self.max);
bvec.all()
}
#[inline]
@@ -59,11 +61,11 @@ impl Aabb{
pub fn center(&self)->Planar64Vec3{
self.min.map_zip(self.max,|(min,max)|min.midpoint(max))
}
//probably use floats for area & volume because we don't care about precision
// pub fn area_weight(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y+d.y*d.z+d.z*d.x
// }
#[inline]
pub fn area_weight(&self)->fixed_wide::fixed::Fixed<2,64>{
let d=self.max-self.min;
d.x*d.y+d.y*d.z+d.z*d.x
}
// pub fn volume(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y*d.z
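The `contains` change above turns the box into a half-open interval: a point on the `min` face now counts as inside, while a point on the `max` face stays outside, matching the new `// min is inclusive` / `// max is not inclusive` comments. A one-axis sketch of the same predicate on plain integers (not the crate's `Planar64` fixed-point type):

```rust
// Half-open containment on one axis, mirroring `self.min.le(point)&point.lt(self.max)`.
fn contains_1d(min:i64,max:i64,p:i64)->bool{
	min<=p&&p<max
}

fn main(){
	assert!(contains_1d(0,10,0));   // on the min face: now inside
	assert!(!contains_1d(0,10,10)); // on the max face: still outside
}
```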
@@ -245,18 +245,19 @@ pub fn generate_bvh<T>(boxen:Vec<(T,Aabb)>)->BvhNode<T>{

fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
let n=boxen.len();
if force||n<20{
let mut aabb=Aabb::default();
let nodes=boxen.into_iter().map(|b|{
aabb.join(&b.1);
const MAX_TERMINAL_BRANCH_LEAF_NODES:usize=20;
if force||n<MAX_TERMINAL_BRANCH_LEAF_NODES{
let mut aabb_outer=Aabb::default();
let nodes=boxen.into_iter().map(|(data,aabb)|{
aabb_outer.join(&aabb);
BvhNode{
content:RecursiveContent::Leaf(b.0),
aabb:b.1,
content:RecursiveContent::Leaf(data),
aabb,
}
}).collect();
BvhNode{
content:RecursiveContent::Branch(nodes),
aabb,
aabb:aabb_outer,
}
}else{
let mut sort_x=Vec::with_capacity(n);
@@ -271,62 +272,64 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
sort_x.sort_by_key(|&(_,c)|c);
sort_y.sort_by_key(|&(_,c)|c);
sort_z.sort_by_key(|&(_,c)|c);
let h=n/2;
let median_x=sort_x[h].1;
let median_y=sort_y[h].1;
let median_z=sort_z[h].1;
//locate a run of values equal to the median
//partition point gives the first index for which the predicate evaluates to false
let first_index_eq_median_x=sort_x.partition_point(|&(_,x)|x<median_x);
let first_index_eq_median_y=sort_y.partition_point(|&(_,y)|y<median_y);
let first_index_eq_median_z=sort_z.partition_point(|&(_,z)|z<median_z);
let first_index_gt_median_x=sort_x.partition_point(|&(_,x)|x<=median_x);
let first_index_gt_median_y=sort_y.partition_point(|&(_,y)|y<=median_y);
let first_index_gt_median_z=sort_z.partition_point(|&(_,z)|z<=median_z);
//pick which side median value copies go into such that both sides are as balanced as possible based on distance from n/2
let partition_point_x=if n.abs_diff(2*first_index_eq_median_x)<n.abs_diff(2*first_index_gt_median_x){first_index_eq_median_x}else{first_index_gt_median_x};
let partition_point_y=if n.abs_diff(2*first_index_eq_median_y)<n.abs_diff(2*first_index_gt_median_y){first_index_eq_median_y}else{first_index_gt_median_y};
let partition_point_z=if n.abs_diff(2*first_index_eq_median_z)<n.abs_diff(2*first_index_gt_median_z){first_index_eq_median_z}else{first_index_gt_median_z};
//this ids which octant the boxen is put in
let mut octant=vec![0;n];
for &(i,_) in &sort_x[partition_point_x..]{
octant[i]+=1<<0;
}
for &(i,_) in &sort_y[partition_point_y..]{
octant[i]+=1<<1;
}
for &(i,_) in &sort_z[partition_point_z..]{
octant[i]+=1<<2;
}
//generate lists for unique octant values
let mut list_list=Vec::with_capacity(8);
let mut octant_list=Vec::with_capacity(8);
for (i,(data,aabb)) in boxen.into_iter().enumerate(){
let octant_id=octant[i];
let list_id=if let Some(list_id)=octant_list.iter().position(|&id|id==octant_id){
list_id
}else{
let list_id=list_list.len();
octant_list.push(octant_id);
list_list.push(Vec::new());
list_id
};
list_list[list_id].push((data,aabb));
}
let mut aabb=Aabb::default();
if list_list.len()==1{
generate_bvh_node(list_list.remove(0),true)
}else{
BvhNode{
content:RecursiveContent::Branch(
list_list.into_iter().map(|b|{
let node=generate_bvh_node(b,false);
aabb.join(&node.aabb);
node
}).collect()
),
aabb,

let mut reverse_acumulated_aabbs=vec![fixed_wide::fixed::Fixed::ZERO;n];
fn get_min_area<T>(
boxen:&[(T,Aabb)],
sorted_list:&[(usize,Planar64)],
reverse_acumulated_aabbs:&mut Vec<fixed_wide::fixed::Fixed<2,64>>,
best_area:&mut fixed_wide::fixed::Fixed<2,64>,
)->Option<usize>{
let mut accumulated_aabb=Aabb::default();
// create an array of aabbs which accumulates box aabbs from the end of the list
for (i,&(index,_)) in sorted_list.iter().enumerate().rev(){
accumulated_aabb.join(&boxen[index].1);
reverse_acumulated_aabbs[i]=accumulated_aabb.area_weight();
}
// iterate the list forwards and calculate the total area
// if the boxes were split at this index
accumulated_aabb=Aabb::default();
let mut best_index=None;
for (i,&(index,_)) in sorted_list.iter().enumerate(){
accumulated_aabb.join(&boxen[index].1);
let area=accumulated_aabb.area_weight()+reverse_acumulated_aabbs[i];
if area<*best_area{
*best_area=area;
best_index=Some(i);
}
}
best_index
}
let mut best_area=fixed_wide::fixed::Fixed::MAX;
let mut best_index=0;
let mut best_list=Vec::new();
if let Some(index_x)=get_min_area(&boxen,&sort_x,&mut reverse_acumulated_aabbs,&mut best_area){
best_index=index_x;
best_list=sort_x;
}
if let Some(index_y)=get_min_area(&boxen,&sort_y,&mut reverse_acumulated_aabbs,&mut best_area){
best_index=index_y;
best_list=sort_y;
}
if let Some(index_z)=get_min_area(&boxen,&sort_z,&mut reverse_acumulated_aabbs,&mut best_area){
best_index=index_z;
best_list=sort_z;
}

// need to split boxen into two according to best_list and best_index
let mut second=Vec::with_capacity(best_index);
boxen.retain(|i|);
let second=best_list.split_off(best_index);
let mut aabb=Aabb::default();
BvhNode{
content:RecursiveContent::Branch(
[best_list,second].map(|b|{
let node=generate_bvh_node(b,false);
aabb.join(&node.aabb);
node
}).collect()
),
aabb,
}
}
}
|
||||
self.process_instruction(instruction);
|
||||
}
|
||||
}
|
||||
#[inline]
|
||||
fn into_iter(self,time_limit:T)->InstructionIter<I,T,Self>
|
||||
where
|
||||
Self:Sized
|
||||
{
|
||||
InstructionIter{
|
||||
time_limit,
|
||||
feedback:self,
|
||||
_phantom:core::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<I,T,X> InstructionFeedback<I,T> for X
|
||||
impl<I,T,F> InstructionFeedback<I,T> for F
|
||||
where
|
||||
T:Copy,
|
||||
X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
|
||||
F:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
|
||||
{}
|
||||
pub struct InstructionIter<I,T:Copy,F:InstructionFeedback<I,T>>{
|
||||
time_limit:T,
|
||||
feedback:F,
|
||||
_phantom:core::marker::PhantomData<I>,
|
||||
}
|
||||
impl<I,T,F> Iterator for InstructionIter<I,T,F>
|
||||
where
|
||||
I:Clone,
|
||||
T:Clone+Copy,
|
||||
F:InstructionFeedback<I,T>,
|
||||
{
|
||||
type Item=TimedInstruction<I,T>;
|
||||
fn next(&mut self)->Option<Self::Item>{
|
||||
let instruction=self.feedback.next_instruction(self.time_limit)?;
|
||||
self.feedback.process_instruction(instruction.clone());
|
||||
Some(instruction)
|
||||
}
|
||||
}
|
||||
|
||||
//PROPER PRIVATE FIELDS!!!
|
||||
pub struct InstructionCollector<I,T>{
|
||||
|
||||
@@ -13,12 +13,12 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
|
||||
bytemuck = "1.14.3"
|
||||
glam = "0.30.0"
|
||||
lazy-regex = "3.1.0"
|
||||
rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
|
||||
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
|
||||
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
|
||||
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
|
||||
rbx_mesh = "0.5.0"
|
||||
rbx_reflection = "5.0.0"
|
||||
rbx_reflection_database = "1.0.0"
|
||||
rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
|
||||
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
|
||||
rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
|
||||
roblox_emulator = { version = "0.5.1", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
|
||||
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
|
||||
|
||||
@@ -12,7 +12,7 @@ mod mesh;
|
||||
mod error;
|
||||
mod union;
|
||||
pub mod loader;
|
||||
mod primitives;
|
||||
pub mod primitives;
|
||||
|
||||
pub mod data{
|
||||
pub struct RobloxMeshBytes(Vec<u8>);
|
||||
|
||||
@@ -9,7 +9,6 @@ use crate::loader::MeshWithSize;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
|
||||
RbxMesh(rbx_mesh::mesh::Error)
|
||||
}
|
||||
impl std::fmt::Display for Error{
|
||||
@@ -22,41 +21,43 @@ impl std::error::Error for Error{}
|
||||
fn ingest_vertices2(
|
||||
vertices:Vec<Vertex2>,
|
||||
mb:&mut model::MeshBuilder,
|
||||
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>{
|
||||
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
|
||||
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
|
||||
//while also doing the inserting unique entries into lists simultaneously
|
||||
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
|
||||
// vertex positions that fail to convert are DROPPED
|
||||
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
|
||||
rbx_mesh::mesh::VertexId2(vertex_id as u32),
|
||||
{
|
||||
let vertex=IndexedVertex{
|
||||
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?),
|
||||
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
|
||||
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
|
||||
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?),
|
||||
color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)))
|
||||
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
|
||||
color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32))),
|
||||
};
|
||||
mb.acquire_vertex_id(vertex)
|
||||
}
|
||||
))).collect::<Result<_,_>>().map_err(Error::Planar64Vec3)?)
|
||||
))).collect()
|
||||
}
|
||||
fn ingest_vertices_truncated2(
|
||||
vertices:Vec<Vertex2Truncated>,
|
||||
mb:&mut model::MeshBuilder,
|
||||
static_color_id:ColorId,//pick one color and fill everything with it
|
||||
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>{
|
||||
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
|
||||
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
|
||||
//while also doing the inserting unique entries into lists simultaneously
|
||||
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
|
||||
// vertex positions that fail to convert are DROPPED
|
||||
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
|
||||
rbx_mesh::mesh::VertexId2(vertex_id as u32),
|
||||
{
|
||||
let vertex=IndexedVertex{
|
||||
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?),
|
||||
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
|
||||
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
|
||||
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?),
|
||||
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
|
||||
color:static_color_id,
|
||||
};
|
||||
mb.acquire_vertex_id(vertex)
|
||||
}
|
||||
))).collect::<Result<_,_>>().map_err(Error::Planar64Vec3)?)
|
||||
))).collect()
|
||||
}
|
||||
|
||||
fn ingest_faces2_lods3(
|
||||
@@ -67,8 +68,8 @@ fn ingest_faces2_lods3(
|
||||
){
|
||||
//faces have to be split into polygon groups based on lod
|
||||
polygon_groups.extend(lods.windows(2).map(|lod_pair|
|
||||
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
|
||||
vec![vertex_id_map[&v0],vertex_id_map[&v1],vertex_id_map[&v2]]
|
||||
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().filter_map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
|
||||
Some(vec![*vertex_id_map.get(&v0)?,*vertex_id_map.get(&v1)?,*vertex_id_map.get(&v2)?])
|
||||
).collect()))
|
||||
))
|
||||
}
|
||||
@@ -80,18 +81,18 @@ pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithS
|
||||
match rbx_mesh::read_versioned(roblox_mesh_bytes.cursor()).map_err(Error::RbxMesh)?{
|
||||
rbx_mesh::mesh::Mesh::V1(mesh)=>{
|
||||
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
|
||||
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).map(|trip|{
|
||||
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).filter_map(|trip|{
|
||||
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|{
|
||||
let vertex=IndexedVertex{
|
||||
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?),
|
||||
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
|
||||
tex:mb.acquire_tex_id(glam::vec2(vertex.tex[0],vertex.tex[1])),
|
||||
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?),
|
||||
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
|
||||
color:color_id,
|
||||
};
|
||||
Ok(mb.acquire_vertex_id(vertex))
|
||||
Some(mb.acquire_vertex_id(vertex))
|
||||
};
|
||||
Ok(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
|
||||
}).collect::<Result<_,_>>().map_err(Error::Planar64Vec3)?)));
|
||||
Some(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
|
||||
}).collect())));
|
||||
},
|
||||
rbx_mesh::mesh::Mesh::V2(mesh)=>{
|
||||
let vertex_id_map=match mesh.header.sizeof_vertex{
|
||||
@@ -101,10 +102,10 @@ pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithS
|
||||
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
|
||||
},
|
||||
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
|
||||
}?;
|
||||
};
|
||||
//one big happy group for all the faces
|
||||
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|face|
|
||||
vec![vertex_id_map[&face.0],vertex_id_map[&face.1],vertex_id_map[&face.2]]
|
||||
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().filter_map(|face|
|
||||
Some(vec![*vertex_id_map.get(&face.0)?,*vertex_id_map.get(&face.1)?,*vertex_id_map.get(&face.2)?])
|
||||
).collect())));
|
||||
},
|
||||
rbx_mesh::mesh::Mesh::V3(mesh)=>{
|
||||
@@ -114,15 +115,15 @@ pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithS
|
||||
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
|
||||
},
|
||||
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
|
||||
}?;
|
||||
};
|
||||
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
|
||||
},
|
||||
rbx_mesh::mesh::Mesh::V4(mesh)=>{
|
||||
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb)?;
|
||||
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
|
||||
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
|
||||
},
|
||||
rbx_mesh::mesh::Mesh::V5(mesh)=>{
|
||||
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb)?;
|
||||
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
|
||||
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
|
||||
},
|
||||
}
|
||||
|
||||
@@ -13,9 +13,9 @@ run-service=[]
|
||||
|
||||
[dependencies]
|
||||
glam = "0.30.0"
|
||||
mlua = { version = "0.10.1", features = ["luau"] }
|
||||
phf = { version = "0.12.1", features = ["macros"] }
|
||||
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
|
||||
mlua = { version = "0.11.3", features = ["luau"] }
|
||||
phf = { version = "0.13.1", features = ["macros"] }
|
||||
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
|
||||
rbx_reflection = "5.0.0"
|
||||
rbx_reflection_database = "1.0.0"
|
||||
rbx_types = "2.0.0"
|
||||
|
||||
@@ -65,7 +65,7 @@ impl<'a> EnumItems<'a>{
|
||||
}
|
||||
|
||||
pub enum CoerceEnum<'a>{
|
||||
Integer(i32),
|
||||
Integer(i64),
|
||||
String(mlua::String),
|
||||
Enum(EnumItem<'a>),
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ use crate::runner::number::Number;
|
||||
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
|
||||
//class functions store
|
||||
lua.set_app_data(ClassMethodsStore::default());
|
||||
lua.set_app_data(InstanceValueStore::default());
|
||||
|
||||
let table=lua.create_table()?;
|
||||
|
||||
@@ -325,13 +326,16 @@ impl mlua::UserData for Instance{
|
||||
}
|
||||
|
||||
//find or create an associated userdata object
|
||||
let instance=this.get_mut(dom)?;
|
||||
if let Some(value)=get_or_create_userdata(instance,lua,index_str)?{
|
||||
if let Some(value)=instance_value_store_mut(lua,|ivs|{
|
||||
//TODO: walk class tree somehow
|
||||
match ivs.get_or_create_instance_values(&instance){
|
||||
Some(mut instance_values)=>instance_values.get_or_create_value(lua,index_str),
|
||||
None=>Ok(None)
|
||||
}
|
||||
})?{
|
||||
return value.into_lua(lua);
|
||||
}
|
||||
// drop mutable borrow
|
||||
//find a child with a matching name
|
||||
let instance=this.get(dom)?;
|
||||
find_first_child(dom,instance,index_str)
|
||||
.map(|instance|Instance::new_unchecked(instance.referent()))
|
||||
.into_lua(lua)
|
||||
@@ -419,7 +423,7 @@ impl mlua::UserData for Instance{
|
||||
rbx_types::Variant::CFrame(typed_value.clone().into())
|
||||
},
|
||||
rbx_reflection::DataType::Value(rbx_types::VariantType::ContentId)=>{
|
||||
let typed_value=value.as_str().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_owned();
|
||||
let typed_value=value.as_string().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_str()?.to_owned();
|
||||
rbx_types::Variant::ContentId(typed_value.into())
|
||||
},
|
||||
rbx_reflection::DataType::Value(rbx_types::VariantType::Ref)=>{
|
||||
@@ -606,6 +610,8 @@ fn find_virtual_property(
|
||||
}
|
||||
|
||||
// lazy-loaded per-instance userdata values
|
||||
// This whole thing is a bad idea and a garbage collection nightmare.
|
||||
// TODO: recreate rbx_dom_weak with my own instance type that owns this data.
|
||||
type CreateUserData=fn(&mlua::Lua)->mlua::Result<mlua::AnyUserData>;
|
||||
type LUD=phf::Map<&'static str,// Class name
|
||||
phf::Map<&'static str,// Value name
|
||||
@@ -638,22 +644,47 @@ static LAZY_USER_DATA:LUD=phf::phf_map!{
|
||||
"MouseClick"=>create_script_signal,
|
||||
},
|
||||
};
|
||||
fn get_or_create_userdata(instance:&mut rbx_dom_weak::Instance,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
|
||||
use std::collections::hash_map::Entry;
|
||||
let db=rbx_reflection_database::get();
|
||||
let Some(class)=db.classes.get(instance.class.as_str())else{
|
||||
return Ok(None)
|
||||
};
|
||||
if let Some((&static_str,create_userdata))=db.superclasses_iter(class).find_map(|superclass|
|
||||
// find pair (class,index)
|
||||
LAZY_USER_DATA.get(&superclass.name)
|
||||
.and_then(|map|map.get_entry(index))
|
||||
){
|
||||
let index_ustr=static_ustr(static_str);
|
||||
return Ok(Some(match instance.userdata.entry(index_ustr){
|
||||
Entry::Occupied(entry)=>entry.get().clone(),
|
||||
Entry::Vacant(entry)=>entry.insert(create_userdata(lua)?).clone(),
|
||||
}));
|
||||
}
|
||||
Ok(None)
|
||||
#[derive(Default)]
|
||||
pub struct InstanceValueStore{
|
||||
values:HashMap<Ref,
|
||||
HashMap<&'static str,
|
||||
mlua::AnyUserData
|
||||
>
|
||||
>,
|
||||
}
|
||||
pub struct InstanceValues<'a>{
|
||||
named_values:&'static phf::Map<&'static str,CreateUserData>,
|
||||
values:&'a mut HashMap<&'static str,mlua::AnyUserData>,
|
||||
}
|
||||
impl InstanceValueStore{
|
||||
pub fn get_or_create_instance_values(&mut self,instance:&rbx_dom_weak::Instance)->Option<InstanceValues<'_>>{
|
||||
LAZY_USER_DATA.get(instance.class.as_str())
|
||||
.map(|named_values|
|
||||
InstanceValues{
|
||||
named_values,
|
||||
values:self.values.entry(instance.referent())
|
||||
.or_insert_with(||HashMap::new()),
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
impl InstanceValues<'_>{
|
||||
pub fn get_or_create_value(&mut self,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
|
||||
Ok(match self.named_values.get_entry(index){
|
||||
Some((&static_index_str,&function_pointer))=>Some(
|
||||
match self.values.entry(static_index_str){
|
||||
Entry::Occupied(entry)=>entry.get().clone(),
|
||||
Entry::Vacant(entry)=>entry.insert(
|
||||
function_pointer(lua)?
|
||||
).clone(),
|
||||
}
|
||||
),
|
||||
None=>None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance_value_store_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut InstanceValueStore)->mlua::Result<T>)->mlua::Result<T>{
|
||||
let mut cf=lua.app_data_mut::<InstanceValueStore>().ok_or_else(||mlua::Error::runtime("InstanceValueStore missing"))?;
|
||||
f(&mut *cf)
|
||||
}
|
||||
|
||||
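The refactor above moves lazily created per-instance userdata (script signals such as `RenderStepped`) out of `rbx_dom_weak`'s `Instance.userdata` and into an `InstanceValueStore` kept in Lua app data: a map keyed by instance `Ref`, holding a per-instance map from value name to cached userdata, with `LAZY_USER_DATA` still supplying the constructor functions. A reduced sketch of that two-level get-or-create cache, with plain types and invented names:

```rust
use std::collections::HashMap;

type Build=fn()->String;

// Outer map: instance id -> (value name -> cached value), built on demand.
#[derive(Default)]
struct Store{
	values:HashMap<u32,HashMap<&'static str,String>>,
}
impl Store{
	fn get_or_create(&mut self,instance:u32,name:&'static str,build:Build)->&String{
		self.values.entry(instance)
			.or_default()
			.entry(name)
			.or_insert_with(build)
	}
}

fn main(){
	let mut store=Store::default();
	let first=store.get_or_create(7,"RenderStepped",||"signal".to_string()).clone();
	// the second lookup hits the cache; the builder is never called again
	let second=store.get_or_create(7,"RenderStepped",||unreachable!()).clone();
	assert_eq!(first,second);
}
```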
@@ -4,7 +4,7 @@

#[derive(Clone,Copy)]
pub enum Number{
Integer(i32),
Integer(i64),
Number(f64),
}
macro_rules! impl_ty{
@@ -1,5 +1,4 @@
use crate::context::Context;
use crate::util::static_ustr;
#[cfg(feature="run-service")]
use crate::scheduler::scheduler_mut;

@@ -126,15 +125,20 @@ impl Runnable<'_>{
}
#[cfg(feature="run-service")]
pub fn run_service_step(&self)->Result<(),mlua::Error>{
let render_stepped_signal=super::instance::instance::dom_mut(&self.lua,|dom|{
let render_stepped=super::instance::instance::dom_mut(&self.lua,|dom|{
let run_service=super::instance::instance::find_first_child_of_class(dom,dom.root(),"RunService").ok_or_else(||mlua::Error::runtime("RunService missing"))?;
Ok(match run_service.userdata.get(&static_ustr("RenderStepped")){
Some(render_stepped)=>Some(render_stepped.borrow::<super::script_signal::ScriptSignal>()?.clone()),
None=>None
super::instance::instance::instance_value_store_mut(&self.lua,|instance_value_store|{
//unwrap because I trust my find_first_child_of_class function to
let mut instance_values=instance_value_store.get_or_create_instance_values(run_service).ok_or_else(||mlua::Error::runtime("RunService InstanceValues missing"))?;
let render_stepped=instance_values.get_or_create_value(&self.lua,"RenderStepped")?;
//let stepped=instance_values.get_or_create_value(&self.lua,"Stepped")?;
//let heartbeat=instance_values.get_or_create_value(&self.lua,"Heartbeat")?;
Ok(render_stepped)
})
})?;
if let Some(render_stepped_signal)=render_stepped_signal{
render_stepped_signal.fire(&mlua::MultiValue::new());
if let Some(render_stepped)=render_stepped{
let signal:&super::script_signal::ScriptSignal=&*render_stepped.borrow()?;
signal.fire(&mlua::MultiValue::new());
}
Ok(())
}
@@ -13,11 +13,11 @@ futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
lazy-regex = "3.1.0"
rbx_asset = { version = "0.4.4", registry = "strafesnet" }
rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet" }
rbx_asset = { version = "0.5.0", registry = "strafesnet" }
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_reflection_database = "1.0.0"
rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.1", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../lib/deferred_loader", registry = "strafesnet" }
@@ -28,7 +28,7 @@ strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet", o
strafesnet_session = { path = "../engine/session", registry = "strafesnet" }
strafesnet_settings = { path = "../engine/settings", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet", optional = true }
wgpu = "26.0.1"
wgpu = "27.0.0"
winit = "0.30.7"

[profile.dev]
@@ -119,12 +119,13 @@ impl<'a> SetupContextPartial3<'a>{

let (device, queue)=pollster::block_on(self.adapter
.request_device(
&wgpu::DeviceDescriptor {
label: None,
required_features: (optional_features & self.adapter.features()) | required_features,
required_limits: needed_limits,
&wgpu::DeviceDescriptor{
label:None,
required_features:(optional_features&self.adapter.features())|required_features,
required_limits:needed_limits,
memory_hints:wgpu::MemoryHints::Performance,
trace: wgpu::Trace::Off,
trace:wgpu::Trace::Off,
experimental_features:wgpu::ExperimentalFeatures::disabled(),
},
))
.expect("Unable to find a suitable GPU adapter!");