Compare commits

...

197 Commits

Author SHA1 Message Date
0fecc1ade0 compare to infinity fev (notes below)
Looking at the test_collision_parabola_edge_north_from_ne test:
- Infinity fev seems to return an incorrect value `Face(FaceVert(SubmeshFaceId(1), SubmeshVertId(7)))`, but despite that, the face crawler works.
- the new md algorithm seems to return the correct value `Vert(VertVert(SubmeshVertId(1), SubmeshVertId(6)))`, but the face crawler breaks.
2025-12-12 14:11:18 -08:00
4d1153c44c fix md harness 2025-12-12 13:46:38 -08:00
b011352438 load source once at runtime 2025-12-12 12:35:34 -08:00
f05141aec4 fix it 2025-12-12 12:35:34 -08:00
4143968173 implement lua 2025-12-12 12:35:34 -08:00
52d37f14c2 add mlua 2025-12-12 12:35:34 -08:00
8f580ad6ba add loop 2025-12-10 15:14:48 -08:00
68e81b31ba add test points 2025-12-10 15:09:30 -08:00
cbd6e1a608 add unit test 2025-12-10 15:06:58 -08:00
0e3cdedc05 partially implement md generic 2025-12-10 14:52:14 -08:00
97cec0e709 fix constraints epsilon
these were supposed to be 3 voxels but were on the order of 3 units
2025-12-05 10:34:21 -08:00
8e24739721 handle non-canonical multi-edge spanning edges 2025-12-05 10:34:21 -08:00
5f2d9d34bb Meshquery::farthest_vert 2025-12-05 10:31:17 -08:00
cd43c5aabe todo 2025-12-05 10:31:17 -08:00
cf4aa8ed16 why 2025-12-05 10:31:17 -08:00
c71e5e9e20 think through simplex constraints 2025-12-04 12:03:38 -08:00
261bc5c845 refactor algorithm to use a struct 2025-12-04 11:21:08 -08:00
6f8c2692ca comments 2025-12-02 10:12:51 -08:00
3dd897b47f comment todos 2025-12-02 10:12:51 -08:00
fcc2348eb0 add unfortunate algorithm 2025-12-02 09:21:42 -08:00
8ca0c94445 remove Eq for MinkowskiFace 2025-11-27 12:15:29 -08:00
463a70c5f4 breakout 2025-11-27 12:15:29 -08:00
643b4fea37 change on_exact signature 2025-11-27 12:15:29 -08:00
1752c51b8f fail without crash 2025-11-27 12:15:29 -08:00
348769a270 no print 2025-11-27 12:15:29 -08:00
7b53d7d595 use new algorithm 2025-11-27 12:15:29 -08:00
18c73b12d8 convert to fev using dumbest algorithm possible 2025-11-27 11:52:28 -08:00
4ff62cc991 negate minkowski input to minimum_difference 2025-11-27 10:11:15 -08:00
a8d9167152 make hint_point consistent with vert 2025-11-27 10:05:13 -08:00
21f4d13ce0 rename variable 2025-11-27 10:05:13 -08:00
84e55c557a physics: derive Eq for Minkowski FEV 2025-11-26 09:45:03 -08:00
f96398cc84 remove indexing 2025-11-25 15:57:14 -08:00
9ff0736d50 put comment back in 2025-11-25 12:12:33 -08:00
c3d390dda0 use struct 2025-11-25 11:50:40 -08:00
eeb47abde1 reduce member fn 2025-11-25 11:43:58 -08:00
567354cebb split reduce 2025-11-25 11:43:47 -08:00
a787bf2ab2 deconstruct 2025-11-25 11:35:11 -08:00
623cc8d822 refactor using Simplex enum 2025-11-25 11:30:24 -08:00
6da4f3b3fb simplify perp 2025-11-25 09:49:30 -08:00
982b837143 fix algorithm 2025-11-25 09:32:25 -08:00
02b52ef748 rename 2025-11-25 09:22:46 -08:00
e32523e626 style 2025-11-25 09:15:25 -08:00
9a0c26cc97 careful relative point opti 2025-11-25 09:15:18 -08:00
45c2fdbb3f work 2025-11-25 09:12:31 -08:00
c1d3045a77 include relative point 2025-11-25 09:09:46 -08:00
f1ca5a3735 fix degenerate case 2025-11-25 09:00:23 -08:00
3fa048434b fix wrong 2025-11-25 09:00:13 -08:00
0909a62caf use min diff 2025-11-25 08:42:05 -08:00
97012940cd implement contains_point 2025-11-25 08:33:10 -08:00
b601667d6a rename variable 2025-11-25 08:32:56 -08:00
6d5c7df14a work 2025-11-25 08:15:45 -08:00
3140c0a552 reduce min dist bit width 2025-11-25 08:15:45 -08:00
f9a92b7a8d FnOnce 2025-11-24 14:34:05 -08:00
1fa6d5c031 fast fail fn 2025-11-24 14:33:57 -08:00
05954bc487 more naming things 2025-11-24 14:18:06 -08:00
e11d96a9b3 refine naming 2025-11-24 14:07:34 -08:00
5ee8a02693 names 2025-11-24 13:58:11 -08:00
efba2a8f19 stuff 2025-11-24 13:54:18 -08:00
de44e19909 switch trait to closures 2025-11-24 13:42:10 -08:00
c7e29d05d4 insane trait just to remove if statements 2025-11-24 13:28:28 -08:00
6171ceab6d refactor calculation result 2025-11-24 13:04:51 -08:00
41aeeefb5b details later 2025-11-24 13:04:51 -08:00
1473fe8fe1 remove is more clear 2025-11-24 13:04:51 -08:00
ac987a2efd remove unused 2025-11-24 13:04:51 -08:00
1050d824e6 work 2025-11-24 13:04:51 -08:00
496f838408 work 2025-11-24 13:04:51 -08:00
38a7aaa046 work 2025-11-24 13:04:51 -08:00
ce246acb43 work 2025-11-24 13:04:51 -08:00
3a7eeaee7f notes 2025-11-24 13:04:51 -08:00
9afbd0a91d zero 2025-11-24 13:04:51 -08:00
2ffa9cbe6c idea 2025-11-24 13:04:51 -08:00
8dee2140e1 eugh 2025-11-24 13:04:51 -08:00
af9f1a218e work 2025-11-24 13:04:51 -08:00
b8442274a5 work 2025-11-24 13:04:51 -08:00
72ba5bad63 work 2025-11-24 13:04:51 -08:00
d7b779170e work 2025-11-24 13:04:51 -08:00
ff28c0a311 wip paste fns 2025-11-24 13:04:51 -08:00
69f49d3dcf no hold ref 2025-11-24 13:04:51 -08:00
a91dfe07ec work 2025-11-24 13:04:51 -08:00
922f80657c work 2025-11-24 13:04:51 -08:00
f05e6b5995 work 2025-11-24 13:04:51 -08:00
e2ba15880a work 2025-11-24 13:04:51 -08:00
749ace538d work 2025-11-24 13:04:51 -08:00
6d9fc38ef1 work 2025-11-24 13:04:51 -08:00
565b53138c work 2025-11-24 13:04:51 -08:00
b0668136d6 wip 2025-11-24 13:04:51 -08:00
0ea353b27d common: fixed_wide: min max 2025-11-24 13:04:44 -08:00
99706079d9 common: fixed_wide: add mul_sign div_sign 2025-11-24 13:04:44 -08:00
730c5fb7dd common: integer: generic zero 2025-11-22 08:47:16 -08:00
d1b61bb997 push_solve: remove epsilon 2025-11-21 10:52:34 -08:00
0343ad19cf MeshQuery::hint_point returns any point inside the mesh 2025-11-20 10:59:08 -08:00
43210b1417 less access to TouchingState private fields 2025-11-19 13:39:07 -08:00
e9d28cf15f document jank 2025-11-19 13:15:31 -08:00
452bac4049 change collision_end_contact & collision_end_intersect fn signatures 2025-11-19 10:57:44 -08:00
48aad78f59 change contact_normal function signature to reduce copies 2025-11-19 10:20:33 -08:00
d45a42f5aa change ContactCollision struct layout
Match TouchingState contacts HashMap K,V layout to try to get lucky with compiler optimization.
2025-11-19 10:20:33 -08:00
c219fec3bc specialize touching member access 2025-11-19 10:08:40 -08:00
2a05d50abb check touching before testing collision 2025-11-19 10:08:40 -08:00
fbb047f8d4 combine call chain 2025-11-19 09:01:51 -08:00
c4d837a552 Fix infinite loop with intersects when allowing 0s collisions 2025-11-19 09:01:51 -08:00
a08bd44789 Generic ConvexMeshId 2025-11-19 09:01:51 -08:00
4ae5359046 rename not_spawn_at to is_not_spawn_at 2025-11-19 09:01:27 -08:00
15ecaf602a deep match 2025-11-18 12:29:46 -08:00
1e0511a7ba remove intermediate allocation 2025-11-18 12:23:05 -08:00
a9e4705d89 remove (some) fixed point implicit conversion
They may be convenient, but they cannot be done at compile-time.
TODO: remove more of them i.e. impl_multiplicative_operator
2025-11-18 11:53:52 -08:00
98069859b5 Gracefully handle 0 acceleration for walking targets 2025-11-18 19:47:04 +00:00
64d3996fa9 use From instead of Into 2025-11-18 11:46:32 -08:00
49c0c16e35 Use a From implementation instead of manual conversion
If the contacts and intersects maps ever change in the future to not be 1:1 with gaps but instead something else, this guarantees that this implicit use of the relationship will be flagged at the compiler level
2025-11-18 19:25:44 +00:00
255bed4803 Ensure the PhysicsData's bvh respects the original model ordering
There's no need to worry about the core HashMap ordering since it's only iterated outside of this very function, for bvh purposes
2025-11-18 19:25:44 +00:00
128e137829 remove redundant code 2025-11-17 13:22:46 -08:00
1e19f804cc custom hex Debug print for Fixed 2025-11-17 12:45:55 -08:00
f6f35c5f54 fix lints 2025-11-17 12:41:34 -08:00
4e7d580918 add lints to workspace 2025-11-16 14:53:23 -08:00
8d5a100a2e update deps 2025-11-09 05:48:31 -08:00
91208db706 drop lazy_regex dep 2025-11-09 05:47:33 -08:00
5a320b514e fix style 2025-11-07 16:52:50 -08:00
661d706a22 common: aabb: area_weight fn 2025-10-17 15:48:59 +01:00
5550d5771e common: bvh: reduce variable scope 2025-10-17 15:03:54 +01:00
c834d1d1ca common: bvh: name constant 2025-10-17 14:57:53 +01:00
ca9d2238a7 common: bvh: tweak code style 2025-10-17 14:57:53 +01:00
f3bb8dd067 update deps 2025-10-01 23:37:07 -07:00
e58f9b9ff2 rbx_loader: silently filter vertices which fail to convert 2025-09-29 19:45:16 -07:00
54c4ed6bad update deps 2025-08-30 15:20:23 -07:00
9aceafa0df roblox_emulator: fix lints 2025-08-30 15:15:23 -07:00
d065bac130 Revert "roblox_emulator: use extended instances"
This reverts commit bb8e131464.
2025-08-30 15:13:28 -07:00
a4d0393556 update rbx-dom 2025-08-30 15:12:22 -07:00
3692d7f79e it: fix bug 3 test 2025-08-29 19:07:57 -07:00
7e49840768 it: set gravity to 0 2025-08-29 19:06:51 -07:00
4ecdd547c6 it: use instruction iter 2025-08-29 18:30:28 -07:00
b0365165e8 physics: create iterator over internal instructions 2025-08-29 18:30:18 -07:00
c2ff52a2ae instruction: iterator 2025-08-29 18:30:18 -07:00
6e778869e8 it: bug 3 test scene 2025-08-29 18:30:18 -07:00
6509bef070 it: add test scene 2025-08-29 16:47:46 -07:00
0fa097a004 aabb: tweak Aabb.contains 2025-08-29 15:40:00 -07:00
55d4b1d264 physics: PhysicsData is immutable after construction 2025-08-28 16:37:48 -07:00
ea28663e95 physics: move code 2025-08-28 16:02:23 -07:00
bac9be9684 physics: add edge case tests 2025-08-28 14:49:36 -07:00
7e76f3309b ignore debugger config 2025-08-26 16:54:36 -07:00
cfd9550566 common: truncate vertex precision to 16 bits in MeshBuilder
The physics algorithm expects vertices to align exactly with faces.  Since the face normal is calculated via the cross product of vertex positions, this allows the face normals to be exact with respect to the vertex positions.
2025-08-26 16:09:15 -07:00
bd16720b5a rbx_loader: refactor mesh convert to use MeshBuilder 2025-08-26 15:55:55 -07:00
633f767d0f snf: disable demo code 2025-08-26 15:55:55 -07:00
602b63e953 fix lifetime lints 2025-08-26 15:55:55 -07:00
6abe622885 fix dead code lints 2025-08-26 15:55:55 -07:00
0ab23dde2b move dev config to strafe-client only 2025-08-26 13:26:14 -07:00
657a2530dc update deps 2025-08-19 21:59:25 -07:00
c4bd034928 update rbx_mesh with CSGMDL5 support 2025-07-23 23:26:36 -07:00
bbcdac8879 rbx_loader: fix dead code lints 2025-07-23 23:26:36 -07:00
38b3f3d7a3 use expect instead of allow 2025-07-19 02:25:55 -07:00
eb80c8b9b5 update deps 2025-06-12 04:56:48 -07:00
63714f190d update snf binrw 2025-05-26 15:36:49 -07:00
f50dfb9399 update rbx_mesh 2025-05-26 15:33:38 -07:00
9db39d2a62 physics: face crawler opti 2025-05-23 14:40:06 -07:00
9e2e1d9d4a it: add physics bug tests, use cargo test -- --ignored 2025-05-22 15:37:10 -07:00
6f1548403a it: split into modules 2025-05-22 13:41:43 -07:00
5f3e998b3d map-tool: v1.7.2 provide download cookie 2025-05-20 16:30:44 -07:00
2d8792be4f tools: add clarion shortcut 2025-05-20 15:52:41 -07:00
15d33eb49d map-tool: cookie arg, now required by roblox 2025-05-20 15:35:03 -07:00
156dacb838 map-tool: v1.7.1 rbx_loader error reports 2025-05-16 16:04:55 -07:00
a7f0e431cb snf: v0.3.1 update common 2025-05-16 15:58:25 -07:00
0ed3cb2adb bsp_loader: v0.3.1 update common 2025-05-16 15:56:19 -07:00
bac43eab66 rbx_loader: v0.7.0 error reporting 2025-05-16 15:55:56 -07:00
2a257236fd deferred_loader: v0.5.1 update common 2025-05-16 15:55:47 -07:00
8fe2c20635 common: v0.7.0 misc 2025-05-16 15:49:12 -07:00
2da7ccce7c fixed_wide: v0.2.1 impl Display for FixedFromFloatError 2025-05-16 15:45:21 -07:00
89e6d11630 update deps 2025-05-16 15:42:19 -07:00
6abb40b6d2 roblox_emulator: v0.5.1 2025-05-16 15:35:54 -07:00
4dcf06c44c rbx_loader: support Seat, VehicleSeat, SpawnLocation 2025-05-16 15:26:59 -07:00
0171a711d9 rbx_loader: skip terrain 2025-05-16 15:08:48 -07:00
3eb702eaea roblox_emulator: give terrain BasePart properties to dodge error 2025-05-16 15:08:48 -07:00
92878a4ae8 strafe-client: print error report 2025-05-16 15:08:48 -07:00
88dcf40d77 map-tool: print error report 2025-05-16 15:08:48 -07:00
df9fcb5b02 rbx_loader: impl Display for RecoverableErrors 2025-05-16 15:08:48 -07:00
b07064cc9d common: impl Display for ModeId 2025-05-16 15:08:43 -07:00
6e435e46ac fixed_wide: impl Display for FixedFromFloatError 2025-05-16 14:21:32 -07:00
ce0368590d roblox_emulator: unused error variant 2025-05-16 14:21:32 -07:00
f896e6cfff rbx_loader: report script errors 2025-05-16 14:21:32 -07:00
dcc91db6f7 rbx_loader: refactor to make RecoverableError report 2025-05-16 14:21:32 -07:00
b6d6878137 physics: body double clone/copy fixups 2025-05-14 18:15:26 -07:00
81c9e3470b physics: use Bounds 2025-05-14 18:15:26 -07:00
b45e02c487 integer: export Parity trait 2025-05-14 17:26:41 -07:00
8698ca4a7e integer: time bitshift operations 2025-05-14 17:26:41 -07:00
8f04953326 physics: simplify face crawler trait bound 2025-05-14 16:52:51 -07:00
ae81d8ceaf derive Debug for many structs 2025-05-14 16:52:51 -07:00
2ecaeb1615 physics: test_collision_small_mv 2025-05-14 13:51:14 -07:00
768cd4ad1a physics: deref can be coerced 2025-05-13 17:26:10 -07:00
708462441a physics: clean up PhysicsMesh generation 2025-05-13 16:08:51 -07:00
da3ab52fe0 physics: do not require complete_mesh as first submesh
This removes a silent assumption about the input meshes and moves the branching from submeshes() to complete_mesh()
2025-05-13 15:45:30 -07:00
20f3e79cde rbx_loader: anything that uses velocity property should not be a booster 2025-05-09 20:56:42 -07:00
217f7fd7c3 physics: recalculate acceleration in collision_{start|end}_intersect 2025-05-09 20:56:42 -07:00
1e4d98f386 physics: use scratch vector in vert_edges 2025-05-09 20:56:42 -07:00
71bce361e6 rbx_loader: default meshpart mesh to empty string 2025-05-08 16:02:22 -07:00
d36c184f7e rbx_loader: auto-scale union graphics to fit size 2025-05-08 15:33:37 -07:00
2c3f257f0e rbx_loader: move mesh size detection into mesh convert 2025-05-08 15:33:37 -07:00
9e7e115809 deferred_loader: load generic mesh 2025-05-08 15:31:32 -07:00
2ea60b07fe rbx_loader: default physics for unions 2025-05-08 15:31:32 -07:00
2fe884175e rbx_loader: export primitive cube info 2025-05-07 15:57:19 -07:00
9f570d0f3e rbx_loader: prepare union convert for default physics 2025-05-07 15:57:19 -07:00
83 changed files with 4319 additions and 2162 deletions

1
.gitignore vendored
View File

@@ -1 +1,2 @@
/target
.zed

1947
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -25,6 +25,13 @@ resolver = "2"
strip = true
codegen-units = 1
[profile.dev]
strip = false
opt-level = 3
[workspace.lints.rust]
# unsafe_code = "forbid"
# missing_docs = "warn"
# missing_debug_implementations = "warn"
single_use_lifetimes = "warn"
trivial_casts = "warn"
unused_lifetimes = "warn"
unused_qualifications = "warn"
# variant_size_differences = "warn"
unexpected_cfgs = "warn"

View File

@@ -11,4 +11,7 @@ id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_session = { path = "../session", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
wgpu = "25.0.0"
wgpu = "27.0.0"
[lints]
workspace = true

View File

@@ -94,7 +94,7 @@ impl GraphicsCamera{
raw
}
}
impl std::default::Default for GraphicsCamera{
impl Default for GraphicsCamera{
fn default()->Self{
Self{
screen_size:glam::UVec2::ONE,
@@ -167,7 +167,7 @@ impl GraphicsState{
}
pub fn generate_models(&mut self,device:&wgpu::Device,queue:&wgpu::Queue,map:&map::CompleteMap){
//generate texture view per texture
let texture_views:HashMap<strafesnet_common::model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_views:HashMap<model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_id=model::TextureId::new(texture_id as u32);
let image=match ddsfile::Dds::read(std::io::Cursor::new(texture_data)){
Ok(image)=>image,
@@ -803,7 +803,7 @@ impl GraphicsState{
module:&shader,
entry_point:Some("vs_entity_texture"),
buffers:&[wgpu::VertexBufferLayout{
array_stride:std::mem::size_of::<GraphicsVertex>() as wgpu::BufferAddress,
array_stride:size_of::<GraphicsVertex>() as wgpu::BufferAddress,
step_mode:wgpu::VertexStepMode::Vertex,
attributes:&wgpu::vertex_attr_array![0=>Float32x3,1=>Float32x2,2=>Float32x3,3=>Float32x4],
}],
@@ -953,6 +953,7 @@ impl GraphicsState{
}),
store:wgpu::StoreOp::Store,
},
depth_slice:None,
})],
depth_stencil_attachment:Some(wgpu::RenderPassDepthStencilAttachment{
view:&self.depth_view,
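
The hunk above captures the code changes that came with the wgpu 25.0 → 27.0 bump in the crate manifest: fully qualified `std::default::Default` and `std::mem::size_of` paths are shortened, and `wgpu::RenderPassColorAttachment` gains a `depth_slice` field. As a minimal sketch of the new attachment shape (the field semantics in the comment are my reading of the API, not stated in the diff):

fn color_attachment(view: &wgpu::TextureView) -> wgpu::RenderPassColorAttachment<'_> {
    wgpu::RenderPassColorAttachment {
        view,
        resolve_target: None,
        // New field in the upgraded wgpu API; None because the color target
        // here is not a slice of a 3D texture.
        depth_slice: None,
        ops: wgpu::Operations {
            load: wgpu::LoadOp::Clear(wgpu::Color::BLACK),
            store: wgpu::StoreOp::Store,
        },
    }
}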

View File

@@ -7,4 +7,8 @@ edition = "2024"
arrayvec = "0.7.6"
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
mlua = { version = "0.11.5", features = ["luau"] }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -18,11 +18,22 @@ impl<T> std::ops::Neg for Body<T>{
}
}
}
impl<T:Copy> std::ops::Neg for &Body<T>{
type Output=Body<T>;
fn neg(self)->Self::Output{
Body{
position:self.position,
velocity:-self.velocity,
acceleration:self.acceleration,
time:-self.time,
}
}
}
impl<T> Body<T>
where Time<T>:Copy,
{
pub const ZERO:Self=Self::new(vec3::ZERO,vec3::ZERO,vec3::ZERO,Time::ZERO);
pub const ZERO:Self=Self::new(vec3::zero(),vec3::zero(),vec3::zero(),Time::ZERO);
pub const fn new(position:Planar64Vec3,velocity:Planar64Vec3,acceleration:Planar64Vec3,time:Time<T>)->Self{
Self{
position,
@@ -96,8 +107,8 @@ impl<T> Body<T>
self.time+=dt.into();
}
pub fn infinity_dir(&self)->Option<Planar64Vec3>{
if self.velocity==vec3::ZERO{
if self.acceleration==vec3::ZERO{
if self.velocity==vec3::zero(){
if self.acceleration==vec3::zero(){
None
}else{
Some(self.acceleration)
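
The new `Neg` impl for `&Body<T>` negates only velocity and time, keeping position and acceleration; that matches the time-reversal identity for a constant-acceleration trajectory x(t) = x0 + v0·dt + ½·a·dt², where substituting dt → -dt flips v0 and leaves x0 and a untouched. A minimal f64 sketch of that identity (illustrative only; the real Body uses the project's fixed-point vector types):

// f64 stand-in for Body's position/velocity/acceleration/time fields.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Body1D { position: f64, velocity: f64, acceleration: f64, time: f64 }

impl Body1D {
    // Position extrapolated to absolute time t.
    fn position_at(&self, t: f64) -> f64 {
        let dt = t - self.time;
        self.position + self.velocity * dt + 0.5 * self.acceleration * dt * dt
    }
    // Mirrors the new `impl Neg for &Body<T>`: flip velocity and time only.
    fn neg(&self) -> Self {
        Body1D { velocity: -self.velocity, time: -self.time, ..*self }
    }
}

fn main() {
    let body = Body1D { position: 1.0, velocity: 2.0, acceleration: -3.0, time: 5.0 };
    // The negated body traces the same positions with time running backwards.
    assert_eq!(body.position_at(7.0), body.neg().position_at(-7.0));
}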

View File

@@ -1,7 +1,9 @@
use crate::model::{GigaTime,FEV,MeshQuery,DirectedEdge};
use crate::model::{into_giga_time,GigaTime,FEV,MeshQuery,DirectedEdge};
use strafesnet_common::integer::{Fixed,Ratio,vec3::Vector3};
use crate::physics::{Time,Body};
use core::ops::Bound;
enum Transition<M:MeshQuery>{
Miss,
Next(FEV<M>,GigaTime),
@@ -27,6 +29,47 @@ impl<M:MeshQuery> CrawlResult<M>{
}
}
// TODO: move predict_collision_face_out algorithm in here or something
/// check_lower_bound
pub fn low<LhsNum,LhsDen,RhsNum,RhsDen,T>(lower_bound:&Bound<Ratio<LhsNum,LhsDen>>,dt:&Ratio<RhsNum,RhsDen>)->bool
where
RhsNum:Copy,
RhsDen:Copy,
LhsNum:Copy,
LhsDen:Copy,
LhsDen:strafesnet_common::integer::Parity,
RhsDen:strafesnet_common::integer::Parity,
LhsNum:core::ops::Mul<RhsDen,Output=T>,
LhsDen:core::ops::Mul<RhsNum,Output=T>,
T:Ord+Copy,
{
match lower_bound{
Bound::Included(time)=>time.le_ratio(*dt),
Bound::Excluded(time)=>time.lt_ratio(*dt),
Bound::Unbounded=>true,
}
}
/// check_upper_bound
pub fn upp<LhsNum,LhsDen,RhsNum,RhsDen,T>(dt:&Ratio<LhsNum,LhsDen>,upper_bound:&Bound<Ratio<RhsNum,RhsDen>>)->bool
where
RhsNum:Copy,
RhsDen:Copy,
LhsNum:Copy,
LhsDen:Copy,
LhsDen:strafesnet_common::integer::Parity,
RhsDen:strafesnet_common::integer::Parity,
LhsNum:core::ops::Mul<RhsDen,Output=T>,
LhsDen:core::ops::Mul<RhsNum,Output=T>,
T:Ord+Copy,
{
match upper_bound{
Bound::Included(time)=>dt.le_ratio(*time),
Bound::Excluded(time)=>dt.lt_ratio(*time),
Bound::Unbounded=>true,
}
}
impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
where
// This is hardcoded for MinkowskiMesh lol
@@ -35,9 +78,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
M::Vert:Copy,
F:core::ops::Mul<Fixed<1,32>,Output=Fixed<4,128>>,
<F as core::ops::Mul<Fixed<1,32>>>::Output:core::iter::Sum,
<M as MeshQuery>::Offset:core::ops::Sub<<F as std::ops::Mul<Fixed<1,32>>>::Output>,
M::Offset:core::ops::Sub<<F as std::ops::Mul<Fixed<1,32>>>::Output>,
{
fn next_transition(&self,body_time:GigaTime,mesh:&M,body:&Body,mut best_time:GigaTime)->Transition<M>{
fn next_transition(&self,mesh:&M,body:&Body,lower_bound:Bound<GigaTime>,mut upper_bound:Bound<GigaTime>)->Transition<M>{
//conflicting derivative means it crosses in the wrong direction.
//if the transition time is equal to an already tested transition, do not replace the current best.
let mut best_transition=Transition::Miss;
@@ -50,8 +93,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//TODO: use higher precision d value?
//use the mesh transform translation instead of baking it into the d value.
for dt in Fixed::<4,128>::zeroes2((n.dot(body.position)-d)*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
best_transition=Transition::Hit(face_id,dt);
break;
}
@@ -65,8 +108,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//WARNING: precision is swept under the rug!
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(n.dot(body.position*2-(mesh.vert(v0)+mesh.vert(v1))).wrap_4(),n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
best_transition=Transition::Next(FEV::Edge(directed_edge_id.as_undirected()),dt);
break;
}
@@ -76,10 +119,11 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
},
&FEV::Edge(edge_id)=>{
//test each face collision time, ignoring roots with zero or conflicting derivative
let edge_n=mesh.edge_n(edge_id);
let edge_verts=mesh.edge_verts(edge_id);
let &[ev0,ev1]=edge_verts.as_ref();
let delta_pos=body.position*2-(mesh.vert(ev0)+mesh.vert(ev1));
let (v0,v1)=(mesh.vert(ev0),mesh.vert(ev1));
let edge_n=v1-v0;
let delta_pos=body.position*2-(v0+v1);
for (i,&edge_face_id) in mesh.edge_faces(edge_id).as_ref().iter().enumerate(){
let face_n=mesh.face_nd(edge_face_id).0;
//edge_n gets parity from the order of edge_faces
@@ -87,8 +131,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//WARNING yada yada d *2
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(n.dot(delta_pos).wrap_4(),n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
best_transition=Transition::Next(FEV::Face(edge_face_id),dt);
break;
}
@@ -99,9 +143,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//vertex normal gets parity from vert index
let n=edge_n*(1-2*(i as i64));
for dt in Fixed::<2,64>::zeroes2((n.dot(body.position-mesh.vert(vert_id)))*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
let dt=Ratio::new(dt.num.widen_4(),dt.den.widen_4());
best_time=dt;
upper_bound=Bound::Included(dt);
best_transition=Transition::Next(FEV::Vert(vert_id),dt);
break;
}
@@ -115,9 +159,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//edge is directed away from vertex, but we want the dot product to turn out negative
let n=-mesh.directed_edge_n(directed_edge_id);
for dt in Fixed::<2,64>::zeroes2((n.dot(body.position-mesh.vert(vert_id)))*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
let dt=Ratio::new(dt.num.widen_4(),dt.den.widen_4());
best_time=dt;
upper_bound=Bound::Included(dt);
best_transition=Transition::Next(FEV::Edge(directed_edge_id.as_undirected()),dt);
break;
}
@@ -128,19 +172,13 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
}
best_transition
}
pub fn crawl(mut self,mesh:&M,relative_body:&Body,start_time:Time,time_limit:Time)->CrawlResult<M>{
let mut body_time={
let r=(start_time-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
let time_limit={
let r=(time_limit-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
pub fn crawl(mut self,mesh:&M,relative_body:&Body,lower_bound:Bound<&Time>,upper_bound:Bound<&Time>)->CrawlResult<M>{
let mut lower_bound=lower_bound.map(|&t|into_giga_time(t,relative_body.time));
let upper_bound=upper_bound.map(|&t|into_giga_time(t,relative_body.time));
for _ in 0..20{
match self.next_transition(body_time,mesh,relative_body,time_limit){
match self.next_transition(mesh,relative_body,lower_bound,upper_bound){
Transition::Miss=>return CrawlResult::Miss(self),
Transition::Next(next_fev,next_time)=>(self,body_time)=(next_fev,next_time),
Transition::Next(next_fev,next_time)=>(self,lower_bound)=(next_fev,Bound::Included(next_time)),
Transition::Hit(face,time)=>return CrawlResult::Hit(face,time),
}
}
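
The interface change in this file swaps the old `body_time`/`best_time` pair for `core::ops::Bound` values: `crawl` maps the caller's time bounds into GigaTime ratios, the lower bound becomes `Included(next_time)` at each transition, and the upper bound tightens to `Included(dt)` whenever a better candidate root is found. A stripped-down sketch of the same `low`/`upp` semantics using plain i64 instead of the Ratio generics (illustrative only):

use core::ops::Bound;

// Same Included/Excluded/Unbounded logic as the new `low` and `upp` helpers.
fn in_bounds(lower: Bound<i64>, t: i64, upper: Bound<i64>) -> bool {
    let low_ok = match lower {
        Bound::Included(l) => l <= t,
        Bound::Excluded(l) => l < t,
        Bound::Unbounded => true,
    };
    let upp_ok = match upper {
        Bound::Included(u) => t <= u,
        Bound::Excluded(u) => t < u,
        Bound::Unbounded => true,
    };
    low_ok && upp_ok
}

fn main() {
    // A candidate root is accepted only when it lies inside the window;
    // the upper bound would then tighten to Included(t).
    assert!(in_bounds(Bound::Included(0), 5, Bound::Unbounded));
    assert!(!in_bounds(Bound::Excluded(5), 5, Bound::Included(10)));
    assert!(in_bounds(Bound::Included(0), 10, Bound::Included(10)));
}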

View File

@@ -1,7 +1,9 @@
mod body;
mod push_solve;
mod face_crawler;
mod model;
mod push_solve;
mod minimum_difference;
mod minimum_difference_lua;
pub mod physics;

View File

@@ -0,0 +1,882 @@
use strafesnet_common::integer::vec3;
use strafesnet_common::integer::vec3::Vector3;
use strafesnet_common::integer::{Fixed,Planar64,Planar64Vec3};
use crate::model::{DirectedEdge,FEV,MeshQuery};
// TODO: remove mesh invert
use crate::model::{MinkowskiMesh,MinkowskiVert};
// This algorithm is based on Lua code
// written by Trey Reynolds in 2021
type Simplex<const N:usize,Vert>=[Vert;N];
#[derive(Clone,Copy)]
enum Simplex1_3<Vert>{
Simplex1(Simplex<1,Vert>),
Simplex2(Simplex<2,Vert>),
Simplex3(Simplex<3,Vert>),
}
impl<Vert> Simplex1_3<Vert>{
fn push_front(self,v:Vert)->Simplex2_4<Vert>{
match self{
Simplex1_3::Simplex1([v0])=>Simplex2_4::Simplex2([v,v0]),
Simplex1_3::Simplex2([v0,v1])=>Simplex2_4::Simplex3([v,v0,v1]),
Simplex1_3::Simplex3([v0,v1,v2])=>Simplex2_4::Simplex4([v,v0,v1,v2]),
}
}
}
#[derive(Clone,Copy)]
enum Simplex2_4<Vert>{
Simplex2(Simplex<2,Vert>),
Simplex3(Simplex<3,Vert>),
Simplex4(Simplex<4,Vert>),
}
/*
local function absDet(r, u, v, w)
if w then
return math.abs((u - r):Cross(v - r):Dot(w - r))
elseif v then
return (u - r):Cross(v - r).magnitude
elseif u then
return (u - r).magnitude
else
return 1
end
end
*/
impl<Vert> Simplex2_4<Vert>{
fn det_is_zero<M:MeshQuery<Vert=Vert>>(self,mesh:&M)->bool{
match self{
Self::Simplex4([p0,p1,p2,p3])=>{
let p0=mesh.vert(p0);
let p1=mesh.vert(p1);
let p2=mesh.vert(p2);
let p3=mesh.vert(p3);
(p1-p0).cross(p2-p0).dot(p3-p0)==Fixed::ZERO
},
Self::Simplex3([p0,p1,p2])=>{
let p0=mesh.vert(p0);
let p1=mesh.vert(p1);
let p2=mesh.vert(p2);
(p1-p0).cross(p2-p0)==vec3::zero()
},
Self::Simplex2([p0,p1])=>{
let p0=mesh.vert(p0);
let p1=mesh.vert(p1);
p1-p0==vec3::zero()
}
}
}
}
/*
local function choosePerpendicularDirection(d)
local x, y, z = d.x, d.y, d.z
local best = math.min(x*x, y*y, z*z)
if x*x == best then
return Vector3.new(y*y + z*z, -x*y, -x*z)
elseif y*y == best then
return Vector3.new(-x*y, x*x + z*z, -y*z)
else
return Vector3.new(-x*z, -y*z, x*x + y*y)
end
end
*/
fn choose_perpendicular_direction(d:Planar64Vec3)->Planar64Vec3{
let x=d.x.abs();
let y=d.y.abs();
let z=d.z.abs();
if x<y&&x<z{
Vector3::new([Fixed::ZERO,-d.z,d.y])
}else if y<z{
Vector3::new([d.z,Fixed::ZERO,-d.x])
}else{
Vector3::new([-d.y,d.x,Fixed::ZERO])
}
}
const fn choose_any_direction()->Planar64Vec3{
vec3::X
}
fn reduce1<M:MeshQuery>(
[v0]:Simplex<1,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduced<M::Vert>{
// --debug.profilebegin("reduceSimplex0")
// local a = a1 - a0
let p0=mesh.vert(v0);
// local p = -a
let p=-(p0+point);
// local direction = p
let mut dir=p;
// if direction.magnitude == 0 then
// direction = chooseAnyDirection()
if dir==vec3::zero(){
dir=choose_any_direction();
}
// return direction, a0, a1
Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
}
}
// local function reduceSimplex1(a0, a1, b0, b1)
fn reduce2<M:MeshQuery>(
[v0,v1]:Simplex<2,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduced<M::Vert>{
// --debug.profilebegin("reduceSimplex1")
// local a = a1 - a0
// local b = b1 - b0
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
// local p = -a
// local u = b - a
let p=-(p0+point);
let u=p1-p0;
// -- modify to take into account the radiuses
// local p_u = p:Dot(u)
let p_u=p.dot(u);
// if p_u >= 0 then
if !p_u.is_negative(){
// local direction = u:Cross(p):Cross(u)
let direction=u.cross(p).cross(u);
// if direction.magnitude == 0 then
if direction==vec3::zero(){
return Reduced{
dir:choose_perpendicular_direction(u),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// -- modify the direction to take into account a0R and b0R
// return direction, a0, a1, b0, b1
return Reduced{
dir:direction.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// local direction = p
let mut dir=p;
// if direction.magnitude == 0 then
if dir==vec3::zero(){
dir=choose_perpendicular_direction(u);
}
// return direction, a0, a1
Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
}
}
// local function reduceSimplex2(a0, a1, b0, b1, c0, c1)
fn reduce3<M:MeshQuery>(
[v0,mut v1,v2]:Simplex<3,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduced<M::Vert>{
// --debug.profilebegin("reduceSimplex2")
// local a = a1 - a0
// local b = b1 - b0
// local c = c1 - c0
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let p2=mesh.vert(v2);
// local p = -a
// local u = b - a
// local v = c - a
let p=-(p0+point);
let mut u=p1-p0;
let v=p2-p0;
// local uv = u:Cross(v)
// local up = u:Cross(p)
// local pv = p:Cross(v)
// local uv_up = uv:Dot(up)
// local uv_pv = uv:Dot(pv)
let mut uv=u.cross(v);
let mut up=u.cross(p);
let pv=p.cross(v);
let uv_up=uv.dot(up);
let uv_pv=uv.dot(pv);
// if uv_up >= 0 and uv_pv >= 0 then
if !uv_up.is_negative()&&!uv_pv.is_negative(){
// local uvp = uv:Dot(p)
let uvp=uv.dot(p);
// local direction = uvp < 0 and -uv or uv
let direction=if uvp.is_negative(){
-uv
}else{
uv
};
// return direction, a0, a1, b0, b1, c0, c1
return Reduced{
dir:direction.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex3([v0,v1,v2]),
};
}
// local u_u = u:Dot(u)
// local v_v = v:Dot(v)
// local uDist = uv_up/(u_u*v.magnitude)
// local vDist = uv_pv/(v_v*u.magnitude)
// local minDist2 = math.min(uDist, vDist)
let u_dist=uv_up*v.length();
let v_dist=uv_pv*u.length();
// if vDist == minDist2 then
if v_dist<u_dist{
u=v;
up=-pv;
uv=-uv;
// b0 = c0
// b1 = c1
v1=v2;
}
// local p_u = p:Dot(u)
let p_u=p.dot(u);
// if p_u >= 0 then
if !p_u.is_negative(){
// local direction = up:Cross(u)
let direction=up.cross(u);
// if direction.magnitude == 0 then
if direction==vec3::zero(){
// direction = uv
return Reduced{
dir:uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// return direction, a0, a1, b0, b1
return Reduced{
dir:direction.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex2([v0,v1]),
};
}
// local direction = p
let dir=p;
// if direction.magnitude == 0 then
if dir==vec3::zero(){
// direction = uv
return Reduced{
dir:uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex1([v0]),
};
}
// return direction, a0, a0
Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
}
}
// local function reduceSimplex3(a0, a1, b0, b1, c0, c1, d0, d1)
fn reduce4<M:MeshQuery>(
[v0,mut v1,mut v2,v3]:Simplex<4,M::Vert>,
mesh:&M,
point:Planar64Vec3,
)->Reduce<M::Vert>{
// --debug.profilebegin("reduceSimplex3")
// local a = a1 - a0
// local b = b1 - b0
// local c = c1 - c0
// local d = d1 - d0
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let p2=mesh.vert(v2);
let p3=mesh.vert(v3);
// local p = -a
// local u = b - a
// local v = c - a
// local w = d - a
let p=-(p0+point);
let mut u=p1-p0;
let mut v=p2-p0;
let w=p3-p0;
// local uv = u:Cross(v)
// local vw = v:Cross(w)
// local wu = w:Cross(u)
// local uvw = uv:Dot(w)
// local pvw = vw:Dot(p)
// local upw = wu:Dot(p)
// local uvp = uv:Dot(p)
let mut uv=u.cross(v);
let vw=v.cross(w);
let wu=w.cross(u);
let uv_w=uv.dot(w);
let pv_w=vw.dot(p);
let up_w=wu.dot(p);
let uv_p=uv.dot(p);
// if pvw/uvw >= 0 and upw/uvw >= 0 and uvp/uvw >= 0 then
if !pv_w.div_sign(uv_w).is_negative()
||!up_w.div_sign(uv_w).is_negative()
||!uv_p.div_sign(uv_w).is_negative(){
// origin is contained, this is a positive detection
// local direction = Vector3.new(0, 0, 0)
// return direction, a0, a1, b0, b1, c0, c1, d0, d1
return Reduce::Escape([v0,v1,v2,v3]);
}
// local uvwSign = uvw < 0 and -1 or uvw > 0 and 1 or 0
// local uvDist = uvp*uvwSign/uv.magnitude
// local vwDist = pvw*uvwSign/vw.magnitude
// local wuDist = upw*uvwSign/wu.magnitude
// local minDist3 = math.min(uvDist, vwDist, wuDist)
let uv_dist=uv_p.mul_sign(uv_w);
let vw_dist=pv_w.mul_sign(uv_w);
let wu_dist=up_w.mul_sign(uv_w);
let wu_len=wu.length();
let uv_len=uv.length();
let vw_len=vw.length();
if vw_dist*wu_len<wu_dist*vw_len{
// if vwDist == minDist3 then
if vw_dist*uv_len<uv_dist*vw_len{
(u,v)=(v,w);
uv=vw;
// uv_p=pv_w; // unused
// b0, c0 = c0, d0
// b1, c1 = c1, d1
(v1,v2)=(v2,v3);
}else{
v2=v3;
}
}else{
// elseif wuDist == minDist3 then
if wu_dist*uv_len<uv_dist*wu_len{
(u,v)=(w,u);
uv=wu;
// uv_p=up_w; // unused
// b0, c0 = d0, b0
// b1, c1 = d1, b1
// before [a,b,c,d]
(v1,v2)=(v3,v1);
// after [a,d,b]
}else{
v2=v3;
}
}
// local up = u:Cross(p)
// local pv = p:Cross(v)
// local uv_up = uv:Dot(up)
// local uv_pv = uv:Dot(pv)
let mut up=u.cross(p);
let pv=p.cross(v);
let uv_up=uv.dot(up);
let uv_pv=uv.dot(pv);
// if uv_up >= 0 and uv_pv >= 0 then
if !uv_up.is_negative()&&!uv_pv.is_negative(){
// local direction = uvw < 0 and uv or -uv
// return direction, a0, a1, b0, b1, c0, c1
if uv_w.is_negative(){
return Reduce::Reduced(Reduced{
dir:uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex3([v0,v1,v2]),
});
}else{
return Reduce::Reduced(Reduced{
dir:-uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex3([v0,v1,v2]),
});
}
}
// local u_u = u:Dot(u)
// local v_v = v:Dot(v)
// local uDist = uv_up/(u_u*v.magnitude)
// local vDist = uv_pv/(v_v*u.magnitude)
// local minDist2 = math.min(uDist, vDist)
let u_dist=uv_up*v.length();
let v_dist=uv_pv*u.length();
// if vDist == minDist2 then
if v_dist<u_dist{
u=v;
up=-pv;
uv=-uv;
// b0 = c0
// b1 = c1
v1=v2;
}
// local p_u = p:Dot(u)
let p_u=p.dot(u);
// if p_u >= 0 then
if !p_u.is_negative(){
// local direction = up:Cross(u)
let direction=up.cross(u);
// if direction.magnitude == 0 then
if direction==vec3::zero(){
// direction = uvw < 0 and uv or -uv
// return direction, a0, a1, b0, b1
if uv_w.is_negative(){
return Reduce::Reduced(Reduced{
dir:uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex2([v0,v1]),
});
}else{
return Reduce::Reduced(Reduced{
dir:-uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex2([v0,v1]),
});
}
}
// return direction, a0, a1, b0, b1
return Reduce::Reduced(Reduced{
dir:direction.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex2([v0,v1]),
});
}
// local direction = p
let dir=p;
// if direction.magnitude == 0 then
if dir==vec3::zero(){
// direction = uvw < 0 and uv or -uv
if uv_w.is_negative(){
return Reduce::Reduced(Reduced{
dir:uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex1([v0]),
});
}else{
return Reduce::Reduced(Reduced{
dir:-uv.narrow_1().unwrap(),
simplex:Simplex1_3::Simplex1([v0]),
});
}
}
// return direction, a0, a1
Reduce::Reduced(Reduced{
dir,
simplex:Simplex1_3::Simplex1([v0]),
})
}
struct Reduced<Vert>{
dir:Planar64Vec3,
simplex:Simplex1_3<Vert>,
}
enum Reduce<Vert>{
Escape(Simplex<4,Vert>),
Reduced(Reduced<Vert>),
}
impl<Vert> Simplex2_4<Vert>{
fn reduce<M:MeshQuery<Vert=Vert>>(self,mesh:&M,point:Planar64Vec3)->Reduce<Vert>{
match self{
Self::Simplex2(simplex)=>Reduce::Reduced(reduce2(simplex,mesh,point)),
Self::Simplex3(simplex)=>Reduce::Reduced(reduce3(simplex,mesh,point)),
Self::Simplex4(simplex)=>reduce4(simplex,mesh,point),
}
}
}
pub fn contains_point(mesh:&MinkowskiMesh<'_>,point:Planar64Vec3)->bool{
const ENABLE_FAST_FAIL:bool=true;
// TODO: remove mesh negation
minimum_difference::<ENABLE_FAST_FAIL,_,_>(&-mesh,point,
// on_exact
|is_intersecting,_simplex|{
is_intersecting
},
// on_escape
|_simplex|{
// intersection is guaranteed at this point
true
},
// fast_fail value
||false
)
}
//infinity fev algorithm state transition
#[derive(Debug)]
enum Transition<Vert>{
Done,//found closest vert, no edges are better
Vert(Vert),//transition to vert
}
enum EV<M:MeshQuery>{
Vert(M::Vert),
Edge(<M::Edge as DirectedEdge>::UndirectedEdge),
}
impl<M:MeshQuery> From<EV<M>> for FEV<M>{
fn from(value:EV<M>)->Self{
match value{
EV::Vert(minkowski_vert)=>FEV::Vert(minkowski_vert),
EV::Edge(minkowski_edge)=>FEV::Edge(minkowski_edge),
}
}
}
trait Contains{
fn contains(&self,point:Planar64Vec3)->bool;
}
// convenience type to check if a point is within some threshold of a plane.
struct ThickPlane{
point:Planar64Vec3,
normal:Vector3<Fixed<2,64>>,
epsilon:Fixed<3,96>,
}
impl ThickPlane{
fn new<M:MeshQuery>(mesh:&M,[v0,v1,v2]:Simplex<3,M::Vert>)->Self{
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let p2=mesh.vert(v2);
let point=p0;
let normal=(p1-p0).cross(p2-p0);
// Allow ~ 2*sqrt(3) units of thickness on the plane
// This is to account for the variance of two voxels across the longest diagonal
let epsilon=(normal.length()*(Planar64::EPSILON*3)).wrap_3();
Self{point,normal,epsilon}
}
}
impl Contains for ThickPlane{
fn contains(&self,point:Planar64Vec3)->bool{
(point-self.point).dot(self.normal).abs()<=self.epsilon
}
}
struct ThickLine{
point:Planar64Vec3,
dir:Planar64Vec3,
epsilon:Fixed<4,128>,
}
impl ThickLine{
fn new<M:MeshQuery>(mesh:&M,[v0,v1]:Simplex<2,M::Vert>)->Self{
let p0=mesh.vert(v0);
let p1=mesh.vert(v1);
let point=p0;
let dir=p1-p0;
// Allow ~ 2*sqrt(3) units of thickness on the line
// This is to account for the variance of two voxels across the longest diagonal
let epsilon=(dir.length_squared()*(Planar64::EPSILON*3)).widen_4();
Self{point,dir,epsilon}
}
}
impl Contains for ThickLine{
fn contains(&self,point:Planar64Vec3)->bool{
(point-self.point).cross(self.dir).length_squared()<=self.epsilon
}
}
struct EVFinder<'a,M,C>{
mesh:&'a M,
constraint:C,
best_distance_squared:Fixed<2,64>,
}
impl<M:MeshQuery,C:Contains> EVFinder<'_,M,C>{
fn next_transition_vert(&mut self,vert_id:M::Vert,point:Planar64Vec3)->Transition<M::Vert>{
let mut best_transition=Transition::Done;
for &directed_edge_id in self.mesh.vert_edges(vert_id).as_ref(){
//test if this edge's opposite vertex closer
let edge_verts=self.mesh.edge_verts(directed_edge_id.as_undirected());
//select opposite vertex
let test_vert_id=edge_verts.as_ref()[directed_edge_id.parity() as usize];
let test_pos=self.mesh.vert(test_vert_id);
let diff=point-test_pos;
let distance_squared=diff.dot(diff);
// ensure test_vert_id is coplanar to simplex
if distance_squared<self.best_distance_squared&&self.constraint.contains(test_pos){
best_transition=Transition::Vert(test_vert_id);
self.best_distance_squared=distance_squared;
}
}
best_transition
}
fn final_ev(&mut self,vert_id:M::Vert,point:Planar64Vec3)->EV<M>{
let mut best_transition=EV::Vert(vert_id);
let vert_pos=self.mesh.vert(vert_id);
let diff=point-vert_pos;
for &directed_edge_id in self.mesh.vert_edges(vert_id).as_ref(){
//test if this edge is closer
let edge_verts=self.mesh.edge_verts(directed_edge_id.as_undirected());
let test_vert_id=edge_verts.as_ref()[directed_edge_id.parity() as usize];
let test_pos=self.mesh.vert(test_vert_id);
let edge_n=test_pos-vert_pos;
let d=edge_n.dot(diff);
//test the edge
let edge_nn=edge_n.dot(edge_n);
// ensure edge contains closest point and directed_edge_id is coplanar to simplex
if !d.is_negative()&&d<=edge_nn&&self.constraint.contains(test_pos){
let distance_squared={
let c=diff.cross(edge_n);
//wrap for speed
(c.dot(c)/edge_nn).divide().wrap_2()
};
if distance_squared<=self.best_distance_squared{
best_transition=EV::Edge(directed_edge_id.as_undirected());
self.best_distance_squared=distance_squared;
}
}
}
best_transition
}
fn crawl_boundaries(&mut self,mut vert_id:M::Vert,point:Planar64Vec3)->EV<M>{
loop{
match self.next_transition_vert(vert_id,point){
Transition::Done=>return self.final_ev(vert_id,point),
Transition::Vert(new_vert_id)=>vert_id=new_vert_id,
}
}
}
}
/// This function drops a vertex down to an edge or a face when the path from infinity did not cross any vertex-edge boundaries, but the point should already have crossed a boundary down from a vertex
fn crawl_to_closest_ev<M:MeshQuery>(mesh:&M,simplex:Simplex<2,M::Vert>,point:Planar64Vec3)->EV<M>{
// naively start at the closest vertex
// the closest vertex is not necessarily the one with the fewest boundary hops
// but it doesn't matter, we will get there regardless.
let (vert_id,best_distance_squared)=simplex.into_iter().map(|vert_id|{
let diff=point-mesh.vert(vert_id);
(vert_id,diff.dot(diff))
}).min_by_key(|&(_,d)|d).unwrap();
let constraint=ThickLine::new(mesh,simplex);
let mut finder=EVFinder{constraint,mesh,best_distance_squared};
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
finder.crawl_boundaries(vert_id,point)
}
/// This function drops a vertex down to an edge or a face when the path from infinity did not cross any vertex-edge boundaries, but the point should already have crossed a boundary down from a vertex
fn crawl_to_closest_fev<'a>(mesh:&MinkowskiMesh<'a>,simplex:Simplex<3,MinkowskiVert>,point:Planar64Vec3)->FEV::<MinkowskiMesh<'a>>{
// naively start at the closest vertex
// the closest vertex is not necessarily the one with the fewest boundary hops
// but it doesn't matter, we will get there regardless.
let (vert_id,best_distance_squared)=simplex.into_iter().map(|vert_id|{
let diff=point-mesh.vert(vert_id);
(vert_id,diff.dot(diff))
}).min_by_key(|&(_,d)|d).unwrap();
let constraint=ThickPlane::new(mesh,simplex);
let mut finder=EVFinder{constraint,mesh,best_distance_squared};
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
match finder.crawl_boundaries(vert_id,point){
//if a vert is returned, it is the closest point to the infinity point
EV::Vert(vert_id)=>FEV::Vert(vert_id),
EV::Edge(edge_id)=>{
//cross to face if we are on the wrong side
let edge_n=mesh.edge_n(edge_id);
// point is multiplied by two because vert_sum sums two vertices.
let delta_pos=point*2-{
let &[v0,v1]=mesh.edge_verts(edge_id).as_ref();
mesh.vert(v0)+mesh.vert(v1)
};
for (i,&face_id) in mesh.edge_faces(edge_id).as_ref().iter().enumerate(){
//test if this face is closer
let (face_n,d)=mesh.face_nd(face_id);
//if test point is behind face, the face is invalid
// TODO: find out why I thought of this backwards
if !(face_n.dot(point)-d).is_positive(){
continue;
}
//edge-face boundary nd, n facing out of the face towards the edge
let boundary_n=face_n.cross(edge_n)*(i as i64*2-1);
let boundary_d=boundary_n.dot(delta_pos);
//is test point behind edge, i.e. contained in the face
if !boundary_d.is_positive(){
//both faces cannot pass this condition, return early if one does.
return FEV::Face(face_id);
}
}
FEV::Edge(edge_id)
},
}
}
pub fn closest_fev_not_inside<'a>(mesh:&MinkowskiMesh<'a>,point:Planar64Vec3)->Option<FEV<MinkowskiMesh<'a>>>{
const ENABLE_FAST_FAIL:bool=false;
// TODO: remove mesh negation
minimum_difference::<ENABLE_FAST_FAIL,_,_>(&-mesh,point,
// on_exact
|is_intersecting,simplex|{
if is_intersecting{
return None;
}
// Convert simplex to FEV
// Vertices must be inverted since the mesh is inverted
Some(match simplex{
Simplex1_3::Simplex1([v0])=>FEV::Vert(-v0),
Simplex1_3::Simplex2([v0,v1])=>{
// invert
let (v0,v1)=(-v0,-v1);
let ev=crawl_to_closest_ev(mesh,[v0,v1],point);
if !matches!(ev,EV::Edge(_)){
println!("I can't believe it's not an edge!");
}
ev.into()
},
Simplex1_3::Simplex3([v0,v1,v2])=>{
// invert
let (v0,v1,v2)=(-v0,-v1,-v2);
// Shimmy to the side until you find a face that contains the closest point
// it's ALWAYS representable as a face, but this algorithm may
// return E or V in edge cases but I don't think that will break the face crawler
let fev=crawl_to_closest_fev(mesh,[v0,v1,v2],point);
if !matches!(fev,FEV::Face(_)){
println!("I can't believe it's not a face!");
}
fev
},
})
},
// on_escape
|_simplex|{
// intersection is guaranteed at this point
// local norm, dist, u0, u1, v0, v1, w0, w1 = expand(queryP, queryQ, a0, a1, b0, b1, c0, c1, d0, d1, 1e-5)
// let simplex=refine_to_exact(mesh,simplex);
None
},
// fast_fail value is irrelevant and will never be returned!
||unreachable!()
)
}
// local function minimumDifference(
// queryP, radiusP,
// queryQ, radiusQ,
// exitRadius, testIntersection
// )
fn minimum_difference<const ENABLE_FAST_FAIL:bool,T,M:MeshQuery>(
mesh:&M,
point:Planar64Vec3,
on_exact:impl FnOnce(bool,Simplex1_3<M::Vert>)->T,
on_escape:impl FnOnce(Simplex<4,M::Vert>)->T,
on_fast_fail:impl FnOnce()->T,
)->T{
// local initialAxis = queryQ() - queryP()
// local new_point_p = queryP(initialAxis)
// local new_point_q = queryQ(-initialAxis)
// local direction, a0, a1, b0, b1, c0, c1, d0, d1
let mut initial_axis=mesh.hint_point()+point;
// degenerate case
if initial_axis==vec3::zero(){
initial_axis=choose_any_direction();
}
let last_point=mesh.farthest_vert(-initial_axis);
// this represents the 'a' value in the commented code
let mut last_pos=mesh.vert(last_point);
let Reduced{dir:mut direction,simplex:mut simplex_small}=reduce1([last_point],mesh,point);
// exitRadius = testIntersection and 0 or exitRadius or 1/0
// for _ = 1, 100 do
loop{
// new_point_p = queryP(-direction)
// new_point_q = queryQ(direction)
// local next_point = new_point_q - new_point_p
let next_point=mesh.farthest_vert(direction);
let next_pos=mesh.vert(next_point);
// if -direction:Dot(next_point) > (exitRadius + radiusP + radiusQ)*direction.magnitude then
if ENABLE_FAST_FAIL&&direction.dot(next_pos+point).is_negative(){
return on_fast_fail();
}
let simplex_big=simplex_small.push_front(next_point);
// if
// direction:Dot(next_point - a) <= 0 or
// absDet(next_point, a, b, c) < 1e-6
if !direction.dot(next_pos-last_pos).is_positive()
||simplex_big.det_is_zero(mesh){
// Found enough information to compute the exact closest point.
// local norm = direction.unit
// local dist = a:Dot(norm)
// local hits = -dist < radiusP + radiusQ
let is_intersecting=(last_pos+point).dot(direction).is_positive();
return on_exact(is_intersecting,simplex_small);
}
// direction, a0, a1, b0, b1, c0, c1, d0, d1 = reduceSimplex(new_point_p, new_point_q, a0, a1, b0, b1, c0, c1)
match simplex_big.reduce(mesh,point){
// if a and b and c and d then
Reduce::Escape(simplex)=>{
// Enough information to conclude that the meshes are intersecting.
// Topology information is computed if needed.
return on_escape(simplex);
},
Reduce::Reduced(reduced)=>{
direction=reduced.dir;
simplex_small=reduced.simplex;
},
}
// next loop this will be a
last_pos=next_pos;
}
}
#[cfg(test)]
mod test{
use super::*;
use crate::model::{PhysicsMesh,PhysicsMeshView};
fn mesh_contains_point(mesh:PhysicsMeshView<'_>,point:Planar64Vec3)->bool{
const ENABLE_FAST_FAIL:bool=true;
// TODO: remove mesh negation
minimum_difference::<ENABLE_FAST_FAIL,_,_>(&mesh,point,
// on_exact
|is_intersecting,_simplex|{
is_intersecting
},
// on_escape
|_simplex|{
// intersection is guaranteed at this point
true
},
// fast_fail value
||false
)
}
#[test]
fn test_cube_points(){
let mesh=PhysicsMesh::unit_cube();
let mesh_view=mesh.complete_mesh_view();
for x in -2..=2{
for y in -2..=2{
for z in -2..=2{
let point=vec3::int(x,y,z)>>1;
assert!(mesh_contains_point(mesh_view,point),"Mesh did not contain point {point}");
}
}
}
}
}
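
This new file is a support-point iteration in the style of GJK, ported from the commented Lua by Trey Reynolds: each loop asks `MeshQuery::farthest_vert` for the vertex farthest along the current search direction, pushes it onto the simplex, and either terminates with an exact answer, escapes with a guaranteed intersection, or reduces the simplex and tries again. The primitive everything hangs on is that support query; a self-contained floating-point sketch of it over a plain vertex list (illustrative only — the trait implementation added in model.rs does the same dot-product maximum in fixed point over submesh vertex ids):

// Index of the vertex farthest along `dir` (the "support" query).
fn farthest_vert(verts: &[[f64; 3]], dir: [f64; 3]) -> usize {
    let mut best = (0usize, f64::NEG_INFINITY);
    for (i, v) in verts.iter().enumerate() {
        let d = v[0] * dir[0] + v[1] * dir[1] + v[2] * dir[2];
        if best.1 < d {
            best = (i, d);
        }
    }
    best.0
}

fn main() {
    // Unit cube corners; the farthest corner along +X+Y+Z is (1,1,1).
    let cube = [
        [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0],
        [1.0, 1.0, 0.0], [1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [1.0, 1.0, 1.0],
    ];
    assert_eq!(farthest_vert(&cube, [1.0, 1.0, 1.0]), 7);
}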

View File

@@ -0,0 +1,173 @@
use mlua::{Lua,FromLuaMulti,IntoLuaMulti,Function,Result as LuaResult,Vector};
use strafesnet_common::integer::{Planar64,Planar64Vec3,FixedFromFloatError};
use crate::model::{MeshQuery,MinkowskiMesh};
pub fn contains_point(
mesh:&MinkowskiMesh,
point:Planar64Vec3,
)->LuaResult<bool>{
Ok(minimum_difference(mesh,point,true)?.hits)
}
pub fn minimum_difference_details(
mesh:&MinkowskiMesh,
point:Planar64Vec3,
)->LuaResult<(bool,Option<Details>)>{
let md=minimum_difference(mesh,point,false)?;
Ok((md.hits,md.details))
}
fn p64v3(v:Vector)->Result<Planar64Vec3,FixedFromFloatError>{
Ok(Planar64Vec3::new([
v.x().try_into()?,
v.y().try_into()?,
v.z().try_into()?,
]))
}
fn vec(v:Planar64Vec3)->Vector{
Vector::new(v.x.into(),v.y.into(),v.z.into())
}
struct MinimumDifference{
hits:bool,
details:Option<Details>
}
pub struct Details{
pub distance:Planar64,
pub p_pos:Planar64Vec3,
pub p_norm:Planar64Vec3,
pub q_pos:Planar64Vec3,
pub q_norm:Planar64Vec3,
}
impl FromLuaMulti for MinimumDifference{
fn from_lua_multi(mut values:mlua::MultiValue,_lua:&Lua)->LuaResult<Self>{
match values.make_contiguous(){
&mut [
mlua::Value::Boolean(hits),
mlua::Value::Nil,
mlua::Value::Nil,
mlua::Value::Nil,
mlua::Value::Nil,
mlua::Value::Nil,
]=>Ok(Self{hits,details:None}),
&mut [
mlua::Value::Boolean(hits),
mlua::Value::Number(distance),
mlua::Value::Vector(p_pos),
mlua::Value::Vector(p_norm),
mlua::Value::Vector(q_pos),
mlua::Value::Vector(q_norm),
]=>Ok(Self{
hits,
details:Some(Details{
distance:distance.try_into().unwrap(),
p_pos:p64v3(p_pos).unwrap(),
p_norm:p64v3(p_norm).unwrap(),
q_pos:p64v3(q_pos).unwrap(),
q_norm:p64v3(q_norm).unwrap(),
}),
}),
&mut [
mlua::Value::Boolean(hits),
mlua::Value::Integer(distance),
mlua::Value::Vector(p_pos),
mlua::Value::Vector(p_norm),
mlua::Value::Vector(q_pos),
mlua::Value::Vector(q_norm),
]=>Ok(Self{
hits,
details:Some(Details{
distance:distance.into(),
p_pos:p64v3(p_pos).unwrap(),
p_norm:p64v3(p_norm).unwrap(),
q_pos:p64v3(q_pos).unwrap(),
q_norm:p64v3(q_norm).unwrap(),
}),
}),
values=>Err(mlua::Error::runtime(format!("Invalid return values: {values:?}"))),
}
}
}
struct Args{
query_p:Function,
radius_p:f64,
query_q:Function,
radius_q:f64,
test_intersection:bool,
}
impl Args{
fn new(
lua:&Lua,
mesh:&'static MinkowskiMesh<'static>,
point:Planar64Vec3,
test_intersection:bool,
)->LuaResult<Self>{
let radius_p=0.0;
let radius_q=0.0;
// Query the farthest point on the mesh in the given direction.
let query_p=lua.create_function(move|_,dir:Option<Vector>|{
let Some(dir)=dir else{
return Ok(vec(mesh.mesh0.hint_point()));
};
let dir=p64v3(dir).unwrap();
let vert_id=mesh.mesh0.farthest_vert(dir);
let dir=mesh.mesh0.vert(vert_id);
Ok(vec(dir))
})?;
// query_q is different since it includes the test point offset.
let query_q=lua.create_function(move|_,dir:Option<Vector>|{
let Some(dir)=dir else{
return Ok(vec(mesh.mesh1.hint_point()+point));
};
let dir=p64v3(dir).unwrap();
let vert_id=mesh.mesh1.farthest_vert(dir);
let dir=mesh.mesh1.vert(vert_id)+point;
Ok(vec(dir))
})?;
Ok(Args{
query_p,
radius_p,
query_q,
radius_q,
test_intersection,
})
}
}
impl IntoLuaMulti for Args{
fn into_lua_multi(self,lua:&Lua)->LuaResult<mlua::MultiValue>{
use mlua::IntoLua;
Ok(mlua::MultiValue::from_vec(vec![
self.query_p.into_lua(lua)?,
self.radius_p.into_lua(lua)?,
self.query_q.into_lua(lua)?,
self.radius_q.into_lua(lua)?,
mlua::Value::Nil,
self.test_intersection.into_lua(lua)?,
]))
}
}
fn minimum_difference(
mesh:&MinkowskiMesh,
point:Planar64Vec3,
test_intersection:bool,
)->LuaResult<MinimumDifference>{
let ctx=init_lua()?;
// SAFETY: mesh lifetime must outlive args usages
let mesh=unsafe{core::mem::transmute(mesh)};
let args=Args::new(&ctx.lua,mesh,point,test_intersection)?;
ctx.f.call(args)
}
struct Ctx{
lua:Lua,
f:Function,
}
fn init_lua()->LuaResult<Ctx>{
static SOURCE:std::sync::LazyLock<String>=std::sync::LazyLock::new(||std::fs::read_to_string("/home/quat/strafesnet/game/src/ReplicatedStorage/Shared/Trey-MinimumDifference.lua").unwrap());
let lua=Lua::new();
lua.sandbox(true)?;
let lib_f=lua.load(SOURCE.as_str()).set_name("Trey-MinimumDifference").into_function()?;
let lib:mlua::Table=lib_f.call(())?;
let f=lib.raw_get("difference")?;
Ok(Ctx{lua,f})
}
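
The harness loads the reference Lua source once through a `LazyLock`, sandboxes a Luau VM, and hands the two support queries to Lua as Rust callbacks. The mlua plumbing is compact enough to show in isolation; below is a minimal self-contained sketch of the same load-once / get-function-from-table / call pattern, using an inline chunk with made-up names instead of the on-disk reference file (assumes the same `luau` feature enabled in the Cargo.toml hunk above, which provides `Lua::sandbox`):

use mlua::{Function, Lua, Result as LuaResult, Table};

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    lua.sandbox(true)?;
    // Tiny stand-in for the real Trey-MinimumDifference module.
    let chunk = r#"
        local lib = {}
        function lib.difference(a, b)
            return a - b
        end
        return lib
    "#;
    // Load the chunk, run it to get the module table, then pull out a function.
    let lib: Table = lua.load(chunk).set_name("example").into_function()?.call(())?;
    let f: Function = lib.raw_get("difference")?;
    // Call into Lua from Rust; arguments and return values convert automatically.
    let result: f64 = f.call((5.0, 2.0))?;
    assert_eq!(result, 3.0);
    Ok(())
}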

View File

@@ -1,5 +1,5 @@
use std::collections::{HashSet,HashMap};
use core::ops::Range;
use core::ops::{Bound,RangeBounds};
use strafesnet_common::integer::vec3::Vector3;
use strafesnet_common::model::{self,MeshId,PolygonIter};
use strafesnet_common::integer::{self,vec3,Fixed,Planar64,Planar64Vec3,Ratio};
@@ -68,11 +68,12 @@ pub enum FEV<M:MeshQuery>{
}
//use Unit32 #[repr(C)] for map files
#[derive(Clone,Hash,Eq,PartialEq)]
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
struct Face{
normal:Planar64Vec3,
dot:Planar64,
}
#[derive(Debug)]
struct Vert(Planar64Vec3);
pub trait MeshQuery{
type Face:Copy;
@@ -89,6 +90,9 @@ pub trait MeshQuery{
let &[v0,v1]=self.edge_verts(directed_edge_id.as_undirected()).as_ref();
(self.vert(v1)-self.vert(v0))*((directed_edge_id.parity() as i64)*2-1)
}
/// This must return a point inside the mesh.
fn hint_point(&self)->Planar64Vec3;
fn farthest_vert(&self,dir:Planar64Vec3)->Self::Vert;
fn vert(&self,vert_id:Self::Vert)->Planar64Vec3;
fn face_nd(&self,face_id:Self::Face)->(Self::Normal,Self::Offset);
fn face_edges(&self,face_id:Self::Face)->impl AsRef<[Self::Edge]>;
@@ -97,18 +101,22 @@ pub trait MeshQuery{
fn vert_edges(&self,vert_id:Self::Vert)->impl AsRef<[Self::Edge]>;
fn vert_faces(&self,vert_id:Self::Vert)->impl AsRef<[Self::Face]>;
}
#[derive(Debug)]
struct FaceRefs{
edges:Vec<SubmeshDirectedEdgeId>,
//verts:Vec<VertId>,
}
#[derive(Debug)]
struct EdgeRefs{
faces:[SubmeshFaceId;2],//left, right
verts:[SubmeshVertId;2],//bottom, top
}
#[derive(Debug)]
struct VertRefs{
faces:Vec<SubmeshFaceId>,
edges:Vec<SubmeshDirectedEdgeId>,
}
#[derive(Debug)]
pub struct PhysicsMeshData{
//this contains all real and virtual faces used in both the complete mesh and convex submeshes
//faces are sorted such that all faces that belong to the complete mesh appear first, and then
@@ -118,6 +126,7 @@ pub struct PhysicsMeshData{
faces:Vec<Face>,//MeshFaceId indexes this list
verts:Vec<Vert>,//MeshVertId indexes this list
}
#[derive(Debug)]
pub struct PhysicsMeshTopology{
//mapping of local ids to PhysicsMeshData ids
faces:Vec<MeshFaceId>,//SubmeshFaceId indexes this list
@@ -143,10 +152,12 @@ impl From<MeshId> for PhysicsMeshId{
pub struct PhysicsSubmeshId(u32);
pub struct PhysicsMesh{
data:PhysicsMeshData,
complete_mesh:PhysicsMeshTopology,
//Most objects in roblox maps are already convex, so the list length is 0
//as soon as the mesh is divided into 2 submeshes, the list length jumps to 2.
//length 1 is unnecessary since the complete mesh would be a duplicate of the only submesh, but would still function properly
// The complete mesh is unused at this time.
// complete_mesh:PhysicsMeshTopology,
// Submeshes are guaranteed to be convex and may contain
// "virtual" faces which are not part of the complete mesh.
// Physics calculations should never resolve to hitting
// a virtual face.
submeshes:Vec<PhysicsMeshTopology>,
}
impl PhysicsMesh{
@@ -210,19 +221,24 @@ impl PhysicsMesh{
};
Self{
data,
complete_mesh:mesh_topology,
submeshes:Vec::new(),
// complete_mesh:mesh_topology.clone(),
submeshes:vec![mesh_topology],
}
}
pub fn unit_cylinder()->Self{
Self::unit_cube()
}
#[inline]
pub const fn complete_mesh(&self)->&PhysicsMeshTopology{
&self.complete_mesh
pub fn complete_mesh(&self)->&PhysicsMeshTopology{
// If there is exactly one submesh, then the complete mesh is identical to it.
if self.submeshes.len()==1{
self.submeshes.first().unwrap()
}else{
panic!("PhysicsMesh complete mesh is not known");
}
}
#[inline]
pub const fn complete_mesh_view(&self)->PhysicsMeshView{
pub fn complete_mesh_view(&self)->PhysicsMeshView<'_>{
PhysicsMeshView{
data:&self.data,
topology:self.complete_mesh(),
@@ -230,21 +246,16 @@ impl PhysicsMesh{
}
#[inline]
pub fn submeshes(&self)->&[PhysicsMeshTopology]{
//the complete mesh is already a convex mesh when len()==0, len()==1 is invalid but will still work
if self.submeshes.len()==0{
std::slice::from_ref(&self.complete_mesh)
}else{
&self.submeshes.as_slice()
}
&self.submeshes
}
#[inline]
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView{
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView<'_>{
PhysicsMeshView{
data:&self.data,
topology:&self.submeshes()[submesh_id.get() as usize],
}
}
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView>{
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView<'_>>{
self.submeshes().iter().map(|topology|PhysicsMeshView{
data:&self.data,
topology,
@@ -313,14 +324,20 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
if mesh.unique_pos.len()==0{
return Err(PhysicsMeshError::ZeroVertices);
}
// An empty physics mesh is a waste of resources
if mesh.physics_groups.len()==0{
return Err(PhysicsMeshError::NoPhysicsGroups);
}
let verts=mesh.unique_pos.iter().copied().map(Vert).collect();
//TODO: fix submeshes
//flat map mesh.physics_groups[$1].groups.polys()[$2] as face_id
//lower face_id points to upper face_id
//the same face is not allowed to be in multiple polygon groups
// because SubmeshFaceId -> CompleteMeshFaceId -> SubmeshFaceId is ambiguous
// when multiple SubmeshFaceId point to one MeshFaceId
let mut faces=Vec::new();
let mut face_id_from_face=HashMap::new();
let mut mesh_topologies:Vec<PhysicsMeshTopology>=mesh.physics_groups.iter().map(|physics_group|{
let mesh_topologies:Vec<PhysicsMeshTopology>=mesh.physics_groups.iter().map(|physics_group|{
//construct submesh
let mut submesh_faces=Vec::new();//these contain a map from submeshId->meshId
let mut submesh_verts=Vec::new();
@@ -381,15 +398,11 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
normal:(normal/len as i64).divide().narrow_1().unwrap(),
dot:(dot/(len*len) as i64).narrow_1().unwrap(),
};
let face_id=match face_id_from_face.get(&face){
Some(&face_id)=>face_id,
None=>{
let face_id=MeshFaceId::new(faces.len() as u32);
face_id_from_face.insert(face.clone(),face_id);
faces.push(face);
face_id
}
};
let face_id=*face_id_from_face.entry(face).or_insert_with(||{
let face_id=MeshFaceId::new(faces.len() as u32);
faces.push(face);
face_id
});
submesh_faces.push(face_id);
face_ref_guys.push(FaceRefEdges(face_edges));
}
@@ -397,16 +410,16 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
PhysicsMeshTopology{
faces:submesh_faces,
verts:submesh_verts,
face_topology:face_ref_guys.into_iter().map(|face_ref_guy|{
FaceRefs{edges:face_ref_guy.0}
face_topology:face_ref_guys.into_iter().map(|FaceRefEdges(edges)|{
FaceRefs{edges}
}).collect(),
edge_topology:edge_pool.edge_guys.into_iter().map(|(edge_ref_verts,edge_ref_faces)|
EdgeRefs{faces:edge_ref_faces.0,verts:edge_ref_verts.0}
edge_topology:edge_pool.edge_guys.into_iter().map(|(EdgeRefVerts(verts),EdgeRefFaces(faces))|
EdgeRefs{faces,verts}
).collect(),
vert_topology:vert_ref_guys.into_iter().map(|vert_ref_guy|
vert_topology:vert_ref_guys.into_iter().map(|VertRefGuy{edges,faces}|
VertRefs{
edges:vert_ref_guy.edges.into_iter().collect(),
faces:vert_ref_guy.faces.into_iter().collect(),
edges:edges.into_iter().collect(),
faces:faces.into_iter().collect(),
}
).collect(),
}
@@ -416,12 +429,13 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
faces,
verts,
},
complete_mesh:mesh_topologies.pop().ok_or(PhysicsMeshError::NoPhysicsGroups)?,
// complete_mesh:None,
submeshes:mesh_topologies,
})
}
}
#[derive(Debug,Clone,Copy)]
pub struct PhysicsMeshView<'a>{
data:&'a PhysicsMeshData,
topology:&'a PhysicsMeshTopology,
@@ -436,6 +450,22 @@ impl MeshQuery for PhysicsMeshView<'_>{
let face_idx=self.topology.faces[face_id.get() as usize].get() as usize;
(self.data.faces[face_idx].normal,self.data.faces[face_idx].dot)
}
fn hint_point(&self)->Planar64Vec3{
// invariant: meshes always encompass the origin
vec3::zero()
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual vertices
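// support mapping: pick the vertex with the greatest projection onto dir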
SubmeshVertId::new(
self.topology.verts.iter()
.enumerate()
.max_by_key(|&(_,&vert_id)|
dir.dot(self.data.verts[vert_id.get() as usize].0)
)
//assume there is at least one vertex.
.unwrap().0 as u32
)
}
//ideally I never calculate the vertex position, but I have to for the graphical meshes...
fn vert(&self,vert_id:SubmeshVertId)->Planar64Vec3{
let vert_idx=self.topology.verts[vert_id.get() as usize].get() as usize;
@@ -458,6 +488,7 @@ impl MeshQuery for PhysicsMeshView<'_>{
}
}
#[derive(Debug)]
pub struct PhysicsMeshTransform{
pub vertex:integer::Planar64Affine3,
pub normal:integer::mat3::Matrix3<Fixed<2,64>>,
@@ -473,6 +504,7 @@ impl PhysicsMeshTransform{
}
}
#[derive(Debug,Clone,Copy)]
pub struct TransformedMesh<'a>{
view:PhysicsMeshView<'a>,
transform:&'a PhysicsMeshTransform,
@@ -487,24 +519,9 @@ impl TransformedMesh<'_>{
transform,
}
}
pub fn verts<'a>(&'a self)->impl Iterator<Item=vec3::Vector3<Fixed<2,64>>>+'a{
pub fn verts<'a>(&'a self)->impl Iterator<Item=Vector3<Fixed<2,64>>>+'a{
self.view.data.verts.iter().map(|&Vert(pos)|self.transform.vertex.transform_point3(pos))
}
pub fn faces(&self)->impl Iterator<Item=SubmeshFaceId>{
(0..self.view.topology.faces.len() as u32).map(SubmeshFaceId::new)
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual virtices
SubmeshVertId::new(
self.view.topology.verts.iter()
.enumerate()
.max_by_key(|&(_,&vert_id)|
dir.dot(self.transform.vertex.transform_point3(self.view.data.verts[vert_id.get() as usize].0))
)
//assume there is more than zero vertices.
.unwrap().0 as u32
)
}
}
impl MeshQuery for TransformedMesh<'_>{
type Face=SubmeshFaceId;
@@ -522,6 +539,21 @@ impl MeshQuery for TransformedMesh<'_>{
// wrap for speed
self.transform.vertex.transform_point3(self.view.vert(vert_id)).wrap_1()
}
fn hint_point(&self)->Planar64Vec3{
self.transform.vertex.translation
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual vertices
SubmeshVertId::new(
self.view.topology.verts.iter()
.enumerate()
.max_by_key(|&(_,&vert_id)|
dir.dot(self.transform.vertex.transform_point3(self.view.data.verts[vert_id.get() as usize].0))
)
//assume there is at least one vertex.
.unwrap().0 as u32
)
}
#[inline]
fn face_edges(&self,face_id:SubmeshFaceId)->impl AsRef<[SubmeshDirectedEdgeId]>{
self.view.face_edges(face_id)
@@ -548,11 +580,20 @@ impl MeshQuery for TransformedMesh<'_>{
//(face,vertex)
//(edge,edge)
//(vertex,face)
#[derive(Clone,Copy,Debug)]
#[derive(Clone,Copy,Debug,Eq,PartialEq)]
pub enum MinkowskiVert{
VertVert(SubmeshVertId,SubmeshVertId),
}
#[derive(Clone,Copy,Debug)]
// TODO: remove this
impl core::ops::Neg for MinkowskiVert{
type Output=Self;
fn neg(self)->Self::Output{
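// a difference vertex v0-v1 negates to v1-v0, so swap the operands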
match self{
MinkowskiVert::VertVert(v0,v1)=>MinkowskiVert::VertVert(v1,v0),
}
}
}
#[derive(Clone,Copy,Debug,Eq,PartialEq)]
pub enum MinkowskiEdge{
VertEdge(SubmeshVertId,SubmeshEdgeId),
EdgeVert(SubmeshEdgeId,SubmeshVertId),
@@ -567,7 +608,7 @@ impl UndirectedEdge for MinkowskiEdge{
}
}
}
#[derive(Clone,Copy,Debug)]
#[derive(Clone,Copy,Debug,Eq,PartialEq)]
pub enum MinkowskiDirectedEdge{
VertEdge(SubmeshVertId,SubmeshDirectedEdgeId),
EdgeVert(SubmeshDirectedEdgeId,SubmeshVertId),
@@ -588,7 +629,7 @@ impl DirectedEdge for MinkowskiDirectedEdge{
}
}
}
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
#[derive(Clone,Copy,Debug,Hash)]
pub enum MinkowskiFace{
VertFace(SubmeshVertId,SubmeshFaceId),
EdgeEdge(SubmeshEdgeId,SubmeshEdgeId,bool),
@@ -598,9 +639,10 @@ pub enum MinkowskiFace{
//FaceFace
}
#[derive(Debug)]
pub struct MinkowskiMesh<'a>{
mesh0:TransformedMesh<'a>,
mesh1:TransformedMesh<'a>,
pub mesh0:TransformedMesh<'a>,
pub mesh1:TransformedMesh<'a>,
}
//infinity fev algorithm state transition
@@ -615,6 +657,18 @@ enum EV{
}
pub type GigaTime=Ratio<Fixed<4,128>,Fixed<4,128>>;
pub fn into_giga_time(time:Time,relative_to:Time)->GigaTime{
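// express (time-relative_to) as a ratio and widen it to GigaTime's 4-word numerator/denominator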
let r=(time-relative_to).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
}
// TODO: remove this
impl<'a> core::ops::Neg for &MinkowskiMesh<'a>{
type Output=MinkowskiMesh<'a>;
fn neg(self)->Self::Output{
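// -(mesh0-mesh1)==mesh1-mesh0, so rebuild the difference mesh with the operands swapped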
MinkowskiMesh::minkowski_sum(self.mesh1,self.mesh0)
}
}
impl MinkowskiMesh<'_>{
pub fn minkowski_sum<'a>(mesh0:TransformedMesh<'a>,mesh1:TransformedMesh<'a>)->MinkowskiMesh<'a>{
@@ -685,7 +739,7 @@ impl MinkowskiMesh<'_>{
}
}
/// Drops a vertex down to an edge or a face when the path from infinity crossed no vertex-edge boundaries, even though the point should already have crossed a boundary down from a vertex.
fn infinity_fev(&self,infinity_dir:Planar64Vec3,point:Planar64Vec3)->FEV::<MinkowskiMesh>{
fn infinity_fev(&self,infinity_dir:Planar64Vec3,point:Planar64Vec3)->FEV::<MinkowskiMesh<'_>>{
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
@@ -730,44 +784,42 @@ impl MinkowskiMesh<'_>{
//
// Most of the calculation time is just calculating the starting point
// for the "actual" crawling algorithm below (predict_collision_{in|out}).
fn closest_fev_not_inside(&self,mut infinity_body:Body,start_time:Time)->Option<FEV<MinkowskiMesh>>{
fn closest_fev_not_inside(&self,mut infinity_body:Body,start_time:Bound<&Time>)->Option<FEV<MinkowskiMesh<'_>>>{
infinity_body.infinity_dir().and_then(|dir|{
let infinity_fev=self.infinity_fev(-dir,infinity_body.position);
//a line is simpler to solve than a parabola
infinity_body.velocity=dir;
infinity_body.acceleration=vec3::ZERO;
infinity_body.acceleration=vec3::zero();
//crawl in from negative infinity along a tangent line to get the closest fev
// TODO: change crawl_fev args to delta time? Optional values?
infinity_fev.crawl(self,&infinity_body,Time::MIN/4,start_time).miss()
infinity_fev.crawl(self,&infinity_body,Bound::Unbounded,start_time).miss()
})
}
pub fn predict_collision_in(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>)->Option<(MinkowskiFace,GigaTime)>{
self.closest_fev_not_inside(relative_body.clone(),start_time).and_then(|fev|{
//continue forwards along the body parabola
fev.crawl(self,relative_body,start_time,time_limit).hit()
})
pub fn predict_collision_in(&self,relative_body:&Body,range:impl RangeBounds<Time>)->Option<(MinkowskiFace,GigaTime)>{
let old_fev=self.closest_fev_not_inside(*relative_body,range.start_bound());
let fev=crate::minimum_difference::closest_fev_not_inside(self,relative_body.position)?;
println!("old_fev={old_fev:?}");
println!("fev={fev:?}");
//continue forwards along the body parabola
fev.crawl(self,relative_body,range.start_bound(),range.end_bound()).hit()
}
pub fn predict_collision_out(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>)->Option<(MinkowskiFace,GigaTime)>{
//create an extrapolated body at time_limit
let infinity_body=-relative_body.clone();
self.closest_fev_not_inside(infinity_body,-time_limit).and_then(|fev|{
//continue backwards along the body parabola
fev.crawl(self,&infinity_body,-time_limit,-start_time).hit()
//no need to test -time<time_limit because of the first step
.map(|(face,time)|(face,-time))
})
pub fn predict_collision_out(&self,relative_body:&Body,range:impl RangeBounds<Time>)->Option<(MinkowskiFace,GigaTime)>{
let fev=crate::minimum_difference::closest_fev_not_inside(self,relative_body.position)?;
let (lower_bound,upper_bound)=(range.start_bound(),range.end_bound());
// swap and negate bounds to do a time inversion
let (lower_bound,upper_bound)=(upper_bound.map(|&t|-t),lower_bound.map(|&t|-t));
let infinity_body=-relative_body;
//continue backwards along the body parabola
fev.crawl(self,&infinity_body,lower_bound.as_ref(),upper_bound.as_ref()).hit()
//no need to test -time<time_limit because of the first step
.map(|(face,time)|(face,-time))
}
pub fn predict_collision_face_out(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>,contact_face_id:MinkowskiFace)->Option<(MinkowskiEdge,GigaTime)>{
pub fn predict_collision_face_out(&self,relative_body:&Body,range:impl RangeBounds<Time>,contact_face_id:MinkowskiFace)->Option<(MinkowskiDirectedEdge,GigaTime)>{
// TODO: make better
use crate::face_crawler::{low,upp};
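// low/upp are bound-aware comparisons replacing the old start_time.le_ratio(dt) and dt.lt_ratio(best_time) checks below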
//no algorithm needed, there is only one state and two cases (Edge,None)
//determine when it passes an edge ("sliding off" case)
let start_time={
let r=(start_time-relative_body.time).to_ratio();
Ratio::new(r.num,r.den)
};
let mut best_time={
let r=(time_limit-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
let start_time=range.start_bound().map(|&t|(t-relative_body.time).to_ratio());
let mut best_time=range.end_bound().map(|&t|into_giga_time(t,relative_body.time));
let mut best_edge=None;
let face_n=self.face_nd(contact_face_id).0;
for &directed_edge_id in self.face_edges(contact_face_id).as_ref(){
@@ -780,28 +832,17 @@ impl MinkowskiMesh<'_>{
//WARNING: truncated precision
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(((n.dot(relative_body.position))*2-d).wrap_4(),n.dot(relative_body.velocity).wrap_4()*2,n.dot(relative_body.acceleration).wrap_4()){
if start_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(relative_body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_edge=Some(directed_edge_id);
if low(&start_time,&dt)&&upp(&dt,&best_time)&&n.dot(relative_body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=Bound::Included(dt);
best_edge=Some((directed_edge_id,dt));
break;
}
}
}
best_edge.map(|e|(e.as_undirected(),best_time))
best_edge
}
fn infinity_in(&self,infinity_body:Body)->Option<(MinkowskiFace,GigaTime)>{
let infinity_fev=self.infinity_fev(-infinity_body.velocity,infinity_body.position);
infinity_fev.crawl(self,&infinity_body,Time::MIN/4,infinity_body.time).hit()
}
pub fn is_point_in_mesh(&self,point:Planar64Vec3)->bool{
let infinity_body=Body::new(point,vec3::Y,vec3::ZERO,Time::ZERO);
//movement must escape the mesh forwards and backwards in time,
//otherwise the point is not inside the mesh
self.infinity_in(infinity_body)
.is_some_and(|_|
self.infinity_in(-infinity_body)
.is_some()
)
pub fn contains_point(&self,point:Planar64Vec3)->bool{
crate::minimum_difference::contains_point(self,point)
}
}
impl MeshQuery for MinkowskiMesh<'_>{
@@ -840,6 +881,12 @@ impl MeshQuery for MinkowskiMesh<'_>{
},
}
}
fn hint_point(&self)->Planar64Vec3{
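// interior hint of the difference mesh: mesh0's hint point minus mesh1's hint point (their translations)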
self.mesh0.transform.vertex.translation-self.mesh1.transform.vertex.translation
}
fn farthest_vert(&self,dir:Planar64Vec3)->MinkowskiVert{
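// support of the difference: farthest vertex of mesh0 along dir paired with the farthest vertex of mesh1 along -dir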
MinkowskiVert::VertVert(self.mesh0.farthest_vert(dir),self.mesh1.farthest_vert(-dir))
}
fn face_edges(&self,face_id:MinkowskiFace)->impl AsRef<[MinkowskiDirectedEdge]>{
match face_id{
MinkowskiFace::VertFace(v0,f1)=>{
@@ -929,10 +976,10 @@ impl MeshQuery for MinkowskiMesh<'_>{
}
fn edge_verts(&self,edge_id:MinkowskiEdge)->impl AsRef<[MinkowskiVert;2]>{
AsRefHelper(match edge_id{
MinkowskiEdge::VertEdge(v0,e1)=>(*self.mesh1.edge_verts(e1).as_ref()).map(|vert_id1|
MinkowskiEdge::VertEdge(v0,e1)=>self.mesh1.edge_verts(e1).as_ref().map(|vert_id1|
MinkowskiVert::VertVert(v0,vert_id1)
),
MinkowskiEdge::EdgeVert(e0,v1)=>(*self.mesh0.edge_verts(e0).as_ref()).map(|vert_id0|
MinkowskiEdge::EdgeVert(e0,v1)=>self.mesh0.edge_verts(e0).as_ref().map(|vert_id0|
MinkowskiVert::VertVert(vert_id0,v1)
),
})
@@ -942,38 +989,43 @@ impl MeshQuery for MinkowskiMesh<'_>{
MinkowskiVert::VertVert(v0,v1)=>{
let mut edges=Vec::new();
//detect shared volume when the other mesh is mirrored along a test edge dir
let v0f=self.mesh0.vert_faces(v0);
let v1f=self.mesh1.vert_faces(v1);
let v0f_n:Vec<_>=v0f.as_ref().iter().map(|&face_id|self.mesh0.face_nd(face_id).0).collect();
let v1f_n:Vec<_>=v1f.as_ref().iter().map(|&face_id|self.mesh1.face_nd(face_id).0).collect();
let the_len=v0f.as_ref().len()+v1f.as_ref().len();
let v0f_thing=self.mesh0.vert_faces(v0);
let v1f_thing=self.mesh1.vert_faces(v1);
let v0f=v0f_thing.as_ref();
let v1f=v1f_thing.as_ref();
let v0f_n:Vec<_>=v0f.iter().map(|&face_id|self.mesh0.face_nd(face_id).0).collect();
let v1f_n:Vec<_>=v1f.iter().map(|&face_id|self.mesh1.face_nd(face_id).0).collect();
// scratch vector
let mut face_normals=Vec::with_capacity(v0f.len()+v1f.len());
face_normals.clone_from(&v0f_n);
for &directed_edge_id in self.mesh0.vert_edges(v0).as_ref(){
let n=self.mesh0.directed_edge_n(directed_edge_id);
let nn=n.dot(n);
// TODO: there's gotta be a better way to do this
//make a set of faces
let mut face_normals=Vec::with_capacity(the_len);
//add mesh0 faces as-is
face_normals.clone_from(&v0f_n);
// drop faces beyond v0f_n
face_normals.truncate(v0f.len());
// make a set of faces from mesh0's perspective
for face_n in &v1f_n{
//add reflected mesh1 faces
//wrap for speed
face_normals.push(*face_n-(n*face_n.dot(n)*2/nn).divide().wrap_3());
}
if is_empty_volume(face_normals){
if is_empty_volume(&face_normals){
edges.push(MinkowskiDirectedEdge::EdgeVert(directed_edge_id,v1));
}
}
face_normals.clone_from(&v1f_n);
for &directed_edge_id in self.mesh1.vert_edges(v1).as_ref(){
let n=self.mesh1.directed_edge_n(directed_edge_id);
let nn=n.dot(n);
let mut face_normals=Vec::with_capacity(the_len);
face_normals.clone_from(&v1f_n);
// drop faces beyond v1f_n
face_normals.truncate(v1f.len());
// make a set of faces from mesh1's perspective
for face_n in &v0f_n{
//wrap for speed
face_normals.push(*face_n-(n*face_n.dot(n)*2/nn).divide().wrap_3());
}
if is_empty_volume(face_normals){
if is_empty_volume(&face_normals){
edges.push(MinkowskiDirectedEdge::VertEdge(v0,directed_edge_id));
}
}
@@ -983,11 +1035,12 @@ impl MeshQuery for MinkowskiMesh<'_>{
}
fn vert_faces(&self,_vert_id:MinkowskiVert)->impl AsRef<[MinkowskiFace]>{
unimplemented!();
vec![]
#[expect(unreachable_code)]
Vec::new()
}
}
fn is_empty_volume(normals:Vec<Vector3<Fixed<3,96>>>)->bool{
fn is_empty_volume(normals:&[Vector3<Fixed<3,96>>])->bool{
let len=normals.len();
for i in 0..len-1{
for j in i+1..len{
@@ -1013,6 +1066,6 @@ fn is_empty_volume(normals:Vec<Vector3<Fixed<3,96>>>)->bool{
#[test]
fn test_is_empty_volume(){
assert!(!is_empty_volume([vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3()].to_vec()));
assert!(is_empty_volume([vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3(),vec3::NEG_X.widen_3()].to_vec()));
assert!(!is_empty_volume(&[vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3()]));
assert!(is_empty_volume(&[vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3(),vec3::NEG_X.widen_3()]));
}

File diff suppressed because it is too large


@@ -39,20 +39,18 @@ impl Contact{
//note that this is horrible with fixed point arithmetic
fn solve1(c0:&Contact)->Option<Ratio<Vector3<Fixed<3,96>>,Fixed<2,64>>>{
const EPSILON:Fixed<2,64>=Fixed::from_bits(Fixed::<2,64>::ONE.to_bits().shr(10));
let det=c0.normal.dot(c0.velocity);
if det.abs()<EPSILON{
if det.abs()==Fixed::ZERO{
return None;
}
let d0=c0.normal.dot(c0.position);
Some(c0.normal*d0/det)
}
fn solve2(c0:&Contact,c1:&Contact)->Option<Ratio<Vector3<Fixed<5,160>>,Fixed<4,128>>>{
const EPSILON:Fixed<4,128>=Fixed::from_bits(Fixed::<4,128>::ONE.to_bits().shr(10));
let u0_u1=c0.velocity.cross(c1.velocity);
let n0_n1=c0.normal.cross(c1.normal);
let det=u0_u1.dot(n0_n1);
if det.abs()<EPSILON{
if det.abs()==Fixed::ZERO{
return None;
}
let d0=c0.normal.dot(c0.position);
@@ -60,10 +58,9 @@ fn solve2(c0:&Contact,c1:&Contact)->Option<Ratio<Vector3<Fixed<5,160>>,Fixed<4,1
Some((c1.normal.cross(u0_u1)*d0+u0_u1.cross(c0.normal)*d1)/det)
}
fn solve3(c0:&Contact,c1:&Contact,c2:&Contact)->Option<Ratio<Vector3<Fixed<4,128>>,Fixed<3,96>>>{
const EPSILON:Fixed<3,96>=Fixed::from_bits(Fixed::<3,96>::ONE.to_bits().shr(10));
let n0_n1=c0.normal.cross(c1.normal);
let det=c2.normal.dot(n0_n1);
if det.abs()<EPSILON{
if det.abs()==Fixed::ZERO{
return None;
}
let d0=c0.normal.dot(c0.position);
@@ -149,7 +146,7 @@ fn is_space_enclosed_4(
}
const fn get_push_ray_0(point:Planar64Vec3)->Ray{
Ray{origin:point,direction:vec3::ZERO}
Ray{origin:point,direction:vec3::zero()}
}
fn get_push_ray_1(point:Planar64Vec3,c0:&Contact)->Option<Ray>{
//wrap for speed
@@ -192,7 +189,7 @@ fn get_push_ray_3(point:Planar64Vec3,c0:&Contact,c1:&Contact,c2:&Contact)->Optio
const fn get_best_push_ray_and_conts_0<'a>(point:Planar64Vec3)->(Ray,Conts<'a>){
(get_push_ray_0(point),Conts::new_const())
}
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts)>{
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts<'_>)>{
get_push_ray_1(point,c0)
.map(|ray|(ray,Conts::from_iter([c0])))
}
@@ -321,13 +318,13 @@ mod tests{
fn test_push_solve(){
let contacts=vec![
Contact{
position:vec3::ZERO,
position:vec3::zero(),
velocity:vec3::Y,
normal:vec3::Y,
}
];
assert_eq!(
vec3::ZERO,
vec3::zero(),
push_solve(&contacts,vec3::NEG_Y)
);
}


@@ -10,3 +10,6 @@ strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../physics", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
strafesnet_snf = { path = "../../lib/snf", registry = "strafesnet" }
[lints]
workspace = true


@@ -31,7 +31,7 @@ pub enum SessionInputInstruction{
Mouse(glam::IVec2),
SetControl(strafesnet_common::physics::SetControlInstruction),
Mode(ImplicitModeInstruction),
Misc(strafesnet_common::physics::MiscInstruction),
Misc(MiscInstruction),
}
/// Implicit mode instructions are fed separately to session.
/// Session generates the explicit mode instructions interlaced with a SetSensitivity instruction
@@ -152,10 +152,10 @@ enum ViewState{
pub struct Session{
directories:Directories,
user_settings:UserSettings,
mouse_interpolator:crate::mouse_interpolator::MouseInterpolator,
mouse_interpolator:MouseInterpolator,
view_state:ViewState,
//gui:GuiState
geometry_shared:physics::PhysicsData,
geometry_shared:PhysicsData,
simulation:Simulation,
// below fields not included in lite session
recording:Recording,
@@ -172,7 +172,7 @@ impl Session{
user_settings,
directories,
mouse_interpolator:MouseInterpolator::new(),
geometry_shared:Default::default(),
geometry_shared:PhysicsData::empty(),
simulation,
view_state:ViewState::Play,
recording:Default::default(),
@@ -184,7 +184,7 @@ impl Session{
}
fn change_map(&mut self,map:&strafesnet_common::map::CompleteMap){
self.simulation.physics.clear();
self.geometry_shared.generate_models(map);
self.geometry_shared=PhysicsData::new(map);
}
pub fn get_frame_state(&self,time:SessionTime)->Option<FrameState>{
match &self.view_state{


@@ -8,3 +8,6 @@ configparser = "3.0.2"
directories = "6.0.0"
glam = "0.30.0"
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

integration-testing/.gitignore (new file)

@@ -0,0 +1 @@
/test_files


@@ -4,6 +4,12 @@ version = "0.1.0"
edition = "2024"
[dependencies]
glam = "0.30.0"
strafesnet_common = { path = "../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../engine/physics", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet" }
# this is just for the primitive constructor
strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet" }
[lints]
workspace = true


@@ -0,0 +1,28 @@
#[expect(dead_code)]
#[derive(Debug)]
pub enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}


@@ -1,7 +1,15 @@
use std::io::Cursor;
use std::path::Path;
mod error;
mod util;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_scenes;
use std::time::Instant;
use error::ReplayError;
use util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
fn main(){
@@ -13,40 +21,6 @@ fn main(){
}
}
#[allow(unused)]
#[derive(Debug)]
enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}
fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}
fn run_replay()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
@@ -57,9 +31,8 @@ fn run_replay()->Result<(),ReplayError>{
let bot=strafesnet_snf::read_bot(data)?.read_all()?;
// create recording
let mut physics_data=PhysicsData::default();
println!("generating models..");
physics_data.generate_models(&map);
let physics_data=PhysicsData::new(&map);
println!("simulating...");
let mut physics=PhysicsState::default();
for ins in bot.instructions{
@@ -167,9 +140,8 @@ fn test_determinism()->Result<(),ReplayError>{
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
let mut physics_data=PhysicsData::default();
println!("generating models..");
physics_data.generate_models(&map);
let physics_data=PhysicsData::new(&map);
let (send,recv)=std::sync::mpsc::channel();


@@ -0,0 +1,117 @@
use strafesnet_physics::physics::{InternalInstruction,PhysicsData,PhysicsState,PhysicsContext};
use strafesnet_common::gameplay_modes::NormalizedModes;
use strafesnet_common::gameplay_attributes::{CollisionAttributes,CollisionAttributesId};
use strafesnet_common::integer::{vec3,mat3,Planar64Affine3,Time};
use strafesnet_common::model::{Mesh,Model,MeshId,ModelId,RenderConfigId};
use strafesnet_common::map::CompleteMap;
use strafesnet_rbx_loader::primitives::{unit_cube,CubeFaceDescription};
struct TestSceneBuilder{
meshes:Vec<Mesh>,
models:Vec<Model>,
}
impl TestSceneBuilder{
fn new()->Self{
Self{
meshes:Vec::new(),
models:Vec::new(),
}
}
fn push_mesh(&mut self,mesh:Mesh)->MeshId{
let mesh_id=self.meshes.len();
self.meshes.push(mesh);
MeshId::new(mesh_id as u32)
}
fn push_mesh_instance(&mut self,mesh:MeshId,transform:Planar64Affine3)->ModelId{
let model=Model{
mesh,
attributes:CollisionAttributesId::new(0),
color:glam::Vec4::ONE,
transform,
};
let model_id=self.models.len();
self.models.push(model);
ModelId::new(model_id as u32)
}
fn build(self)->PhysicsData{
let modes=NormalizedModes::new(Vec::new());
let attributes=vec![CollisionAttributes::contact_default()];
let meshes=self.meshes;
let models=self.models;
let textures=Vec::new();
let render_configs=Vec::new();
PhysicsData::new(&CompleteMap{
modes,
attributes,
meshes,
models,
textures,
render_configs,
})
}
}
fn test_scene()->PhysicsData{
let mut builder=TestSceneBuilder::new();
let cube_face_description=CubeFaceDescription::new(Default::default(),RenderConfigId::new(0));
let mesh=builder.push_mesh(unit_cube(cube_face_description));
// place two 5x5x5 cubes.
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(0,0,0)
));
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(5,-5,0)
));
builder.build()
}
#[test]
fn simultaneous_collision(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,1,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::zero();
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(2))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// the order that they hit does matter, but we aren't currently worrying about that.
// See multi-collision branch
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5,0,0));
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(1));
}
#[test]
fn bug_3(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,2,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::zero();
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(3))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// touch side of part at 0,0,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
// touch top of part at 5,-5,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(2));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5+2,0,0)>>1);
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(2));
}


@@ -0,0 +1,76 @@
use crate::error::ReplayError;
use crate::util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
#[test]
#[ignore]
fn physics_bug_2()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("test_files/bhop_monster_jam.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
// body pos = Vector { array: [Fixed { bits: 554895163352 }, Fixed { bits: 1485633089990 }, Fixed { bits: 1279601007173 }] }
// after the fix it's still happening, possibly for a different reason; new position to reproduce it:
// body pos = Vector { array: [Fixed { bits: 555690659654 }, Fixed { bits: 1490485868773 }, Fixed { bits: 1277783839382 }] }
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
vec3::raw_xyz(555690659654,1490485868773,1277783839382),
vec3::int(0,0,0),
vec3::int(0,-100,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
// hit=Some(ModelId(2262))
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}
#[test]
#[ignore]
fn physics_bug_3()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692152916.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
// bhop_toc corner position after wall hits
// vec3::raw_xyz(-1401734815424,3315081280280,-2466057177493),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// alternate room center position
// vec3::raw_xyz(-1129043783837,3324870327882,-2014012350212),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// corner setup before wall hits
vec3::raw_xyz(-1392580080675,3325402529458,-2444727738679),
vec3::raw_xyz(-30259028820,-22950929553,-71141663007),
vec3::raw_xyz(0,-429496729600,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}


@@ -0,0 +1,7 @@
use std::io::Cursor;
use std::path::Path;
pub fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}


@@ -1,6 +1,6 @@
[package]
name = "strafesnet_bsp_loader"
version = "0.3.0"
version = "0.3.1"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -11,9 +11,12 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
glam = "0.30.0"
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
vbsp = "0.9.1"
vbsp-entities-css = "0.6.0"
vmdl = "0.2.0"
vpk = "0.3.0"
[lints]
workspace = true


@@ -7,7 +7,7 @@ use crate::{valve_transform_normal,valve_transform_dist};
#[derive(Hash,Eq,PartialEq)]
struct Face{
normal:integer::Planar64Vec3,
dot:integer::Planar64,
dot:Planar64,
}
#[derive(Debug)]
@@ -187,7 +187,7 @@ fn planes_to_faces(face_list:std::collections::HashSet<Face>)->Result<Faces,Plan
}
}
#[allow(dead_code)]
#[expect(dead_code)]
#[derive(Debug)]
pub enum BrushToMeshError{
SliceBrushSides,
@@ -210,7 +210,7 @@ pub fn faces_to_mesh(faces:Vec<Vec<integer::Planar64Vec3>>)->model::Mesh{
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// normals are ignored by physics
let normal=mb.acquire_normal_id(integer::vec3::ZERO);
let normal=mb.acquire_normal_id(integer::vec3::zero());
let polygon_list=faces.into_iter().map(|face|{
face.into_iter().map(|pos|{


@@ -105,7 +105,7 @@ pub fn convert<'a>(
water:Some(attr::IntersectingWater{
viscosity:integer::Planar64::ONE,
density:integer::Planar64::ONE,
velocity:integer::vec3::ZERO,
velocity:integer::vec3::zero(),
}),
},
general:attr::GeneralAttributes::default(),
@@ -295,7 +295,7 @@ pub fn convert<'a>(
attributes,
transform:integer::Planar64Affine3::new(
integer::mat3::identity(),
integer::vec3::ZERO,
integer::vec3::zero(),
),
color:glam::Vec4::ONE,
});
@@ -347,9 +347,9 @@ pub struct PartialMap1{
modes:NormalizedModes,
}
impl PartialMap1{
pub fn add_prop_meshes<'a>(
pub fn add_prop_meshes(
self,
prop_meshes:Meshes,
prop_meshes:Meshes<model::Mesh>,
)->PartialMap2{
PartialMap2{
attributes:self.attributes,


@@ -5,7 +5,6 @@ use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
use crate::{Bsp,Vpk};
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -32,7 +31,7 @@ impl Loader for TextureLoader{
type Error=TextureError;
type Index<'a>=Cow<'a,str>;
type Resource=Texture;
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
let file_name=format!("textures/{}.dds",index);
let mut file=std::fs::File::open(file_name)?;
let mut data=Vec::new();
@@ -41,7 +40,6 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -113,7 +111,7 @@ impl ModelLoader<'_,'_>{
}
}
}
impl<'bsp,'vpk> Loader for ModelLoader<'bsp,'vpk>{
impl Loader for ModelLoader<'_,'_>{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Resource=vmdl::Model;
@@ -153,7 +151,7 @@ impl MeshLoader<'_,'_,'_,'_>{
}
}
}
impl<'str,'bsp,'vpk,'load> Loader for MeshLoader<'bsp,'vpk,'load,'str>{
impl Loader for MeshLoader<'_,'_,'_,'_>{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Resource=Mesh;


@@ -61,7 +61,7 @@ pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredL
_=>None,
}
})
}).flat_map(|[v1,v2,v3]|{
}).filter_map(|[v1,v2,v3]|{
// this should probably be a fatal error :D
let v1=model_vertices.get(v1)?;
let v2=model_vertices.get(v2)?;


@@ -1,6 +1,6 @@
[package]
name = "strafesnet_common"
version = "0.6.0"
version = "0.7.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -17,3 +17,6 @@ linear_ops = { version = "0.1.1", path = "../linear_ops", registry = "strafesnet
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet" }
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
[lints]
workspace = true


@@ -2,7 +2,9 @@ use crate::integer::{vec3,Planar64Vec3};
#[derive(Clone)]
pub struct Aabb{
// min is inclusive
min:Planar64Vec3,
// max is not inclusive
max:Planar64Vec3,
}
@@ -43,7 +45,7 @@ impl Aabb{
}
#[inline]
pub fn contains(&self,point:Planar64Vec3)->bool{
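// half-open box test: min<=point on every axis and point<max on every axis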
let bvec=self.min.lt(point)&point.lt(self.max);
let bvec=self.min.le(point)&point.lt(self.max);
bvec.all()
}
#[inline]
@@ -59,11 +61,11 @@ impl Aabb{
pub fn center(&self)->Planar64Vec3{
self.min.map_zip(self.max,|(min,max)|min.midpoint(max))
}
//probably use floats for area & volume because we don't care about precision
// pub fn area_weight(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y+d.y*d.z+d.z*d.x
// }
#[inline]
pub fn area_weight(&self)->fixed_wide::fixed::Fixed<2,64>{
let d=self.max-self.min;
d.x*d.y+d.y*d.z+d.z*d.x
}
// pub fn volume(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y*d.z


@@ -245,18 +245,19 @@ pub fn generate_bvh<T>(boxen:Vec<(T,Aabb)>)->BvhNode<T>{
fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
let n=boxen.len();
if force||n<20{
let mut aabb=Aabb::default();
let nodes=boxen.into_iter().map(|b|{
aabb.join(&b.1);
const MAX_TERMINAL_BRANCH_LEAF_NODES:usize=20;
if force||n<MAX_TERMINAL_BRANCH_LEAF_NODES{
let mut aabb_outer=Aabb::default();
let nodes=boxen.into_iter().map(|(data,aabb)|{
aabb_outer.join(&aabb);
BvhNode{
content:RecursiveContent::Leaf(b.0),
aabb:b.1,
content:RecursiveContent::Leaf(data),
aabb,
}
}).collect();
BvhNode{
content:RecursiveContent::Branch(nodes),
aabb,
aabb:aabb_outer,
}
}else{
let mut sort_x=Vec::with_capacity(n);
@@ -272,9 +273,9 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
sort_y.sort_by_key(|&(_,c)|c);
sort_z.sort_by_key(|&(_,c)|c);
let h=n/2;
let median_x=sort_x[h].1;
let median_y=sort_y[h].1;
let median_z=sort_z[h].1;
let (_,median_x)=sort_x[h];
let (_,median_y)=sort_y[h];
let (_,median_z)=sort_z[h];
//locate a run of values equal to the median
//partition point gives the first index for which the predicate evaluates to false
let first_index_eq_median_x=sort_x.partition_point(|&(_,x)|x<median_x);
@@ -313,10 +314,10 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
};
list_list[list_id].push((data,aabb));
}
let mut aabb=Aabb::default();
if list_list.len()==1{
generate_bvh_node(list_list.remove(0),true)
}else{
let mut aabb=Aabb::default();
BvhNode{
content:RecursiveContent::Branch(
list_list.into_iter().map(|b|{


@@ -140,6 +140,15 @@ impl ModeId{
pub const MAIN:Self=Self(0);
pub const BONUS:Self=Self(1);
}
impl core::fmt::Display for ModeId{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->core::fmt::Result{
match self{
&Self::MAIN=>write!(f,"Main"),
&Self::BONUS=>write!(f,"Bonus"),
&Self(mode_id)=>write!(f,"Bonus{mode_id}"),
}
}
}
#[derive(Clone)]
pub struct Mode{
style:gameplay_style::StyleModifiers,


@@ -34,7 +34,7 @@ pub struct StyleModifiers{
//unused
pub mass:Planar64,
}
impl std::default::Default for StyleModifiers{
impl Default for StyleModifiers{
fn default()->Self{
Self::roblox_bhop()
}
@@ -319,7 +319,7 @@ impl WalkSettings{
self.accelerate.accel.min((-gravity.y*friction).clamp_1())
}
pub fn get_walk_target_velocity(&self,control_dir:Planar64Vec3,normal:Planar64Vec3)->Planar64Vec3{
if control_dir==crate::integer::vec3::ZERO{
if control_dir==crate::integer::vec3::zero(){
return control_dir;
}
let nn=normal.length_squared();
@@ -329,13 +329,13 @@ impl WalkSettings{
let dd=d*d;
if dd<nnmm{
let cr=normal.cross(control_dir);
if cr==crate::integer::vec3::ZERO_2{
crate::integer::vec3::ZERO
if cr==crate::integer::vec3::zero(){
crate::integer::vec3::zero()
}else{
(cr.cross(normal)*self.accelerate.topspeed/((nn*(nnmm-dd)).sqrt())).divide().clamp_1()
}
}else{
crate::integer::vec3::ZERO
crate::integer::vec3::zero()
}
}
pub fn is_slope_walkable(&self,normal:Planar64Vec3,up:Planar64Vec3)->bool{
@@ -360,7 +360,7 @@ impl LadderSettings{
self.accelerate.accel
}
pub fn get_ladder_target_velocity(&self,mut control_dir:Planar64Vec3,normal:Planar64Vec3)->Planar64Vec3{
if control_dir==crate::integer::vec3::ZERO{
if control_dir==crate::integer::vec3::zero(){
return control_dir;
}
let nn=normal.length_squared();
@@ -382,13 +382,13 @@ impl LadderSettings{
//- fix the underlying issue
if dd<nnmm{
let cr=normal.cross(control_dir);
if cr==crate::integer::vec3::ZERO_2{
crate::integer::vec3::ZERO
if cr==crate::integer::vec3::zero(){
crate::integer::vec3::zero()
}else{
(cr.cross(normal)*self.accelerate.topspeed/((nn*(nnmm-dd)).sqrt())).divide().clamp_1()
}
}else{
crate::integer::vec3::ZERO
crate::integer::vec3::zero()
}
}
}


@@ -34,12 +34,41 @@ pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsu
self.process_instruction(instruction);
}
}
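// turn the feedback loop into a self-driving iterator bounded by time_limit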
#[inline]
fn into_iter(self,time_limit:T)->InstructionIter<I,T,Self>
where
Self:Sized
{
InstructionIter{
time_limit,
feedback:self,
_phantom:core::marker::PhantomData,
}
}
}
impl<I,T,X> InstructionFeedback<I,T> for X
impl<I,T,F> InstructionFeedback<I,T> for F
where
T:Copy,
X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
F:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
{}
pub struct InstructionIter<I,T:Copy,F:InstructionFeedback<I,T>>{
time_limit:T,
feedback:F,
_phantom:core::marker::PhantomData<I>,
}
impl<I,T,F> Iterator for InstructionIter<I,T,F>
where
I:Clone,
T:Clone+Copy,
F:InstructionFeedback<I,T>,
{
type Item=TimedInstruction<I,T>;
fn next(&mut self)->Option<Self::Item>{
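// pull the next instruction up to time_limit, feed it back into the consumer half, then yield it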
let instruction=self.feedback.next_instruction(self.time_limit)?;
self.feedback.process_instruction(instruction.clone());
Some(instruction)
}
}
//PROPER PRIVATE FIELDS!!!
pub struct InstructionCollector<I,T>{


@@ -1,5 +1,5 @@
pub use fixed_wide::fixed::*;
pub use ratio_ops::ratio::{Ratio,Divide};
pub use ratio_ops::ratio::{Ratio,Divide,Parity};
//integer units
@@ -86,7 +86,7 @@ impl<T> std::fmt::Display for Time<T>{
write!(f,"{}s+{:09}ns",self.0/Self::ONE_SECOND.0,self.0%Self::ONE_SECOND.0)
}
}
impl<T> std::default::Default for Time<T>{
impl<T> Default for Time<T>{
fn default()->Self{
Self::raw(0)
}
@@ -126,26 +126,27 @@ impl_time_additive_assign_operator!(core::ops::AddAssign,add_assign);
impl_time_additive_assign_operator!(core::ops::SubAssign,sub_assign);
impl_time_additive_assign_operator!(core::ops::RemAssign,rem_assign);
impl<T> std::ops::Mul for Time<T>{
type Output=Ratio<fixed_wide::fixed::Fixed<2,64>,fixed_wide::fixed::Fixed<2,64>>;
type Output=Ratio<Fixed<2,64>,Fixed<2,64>>;
#[inline]
fn mul(self,rhs:Self)->Self::Output{
Ratio::new(Fixed::raw(self.0)*Fixed::raw(rhs.0),Fixed::raw_digit(1_000_000_000i64.pow(2)))
}
}
impl<T> std::ops::Div<i64> for Time<T>{
type Output=Self;
#[inline]
fn div(self,rhs:i64)->Self::Output{
Self::raw(self.0/rhs)
}
}
impl<T> std::ops::Mul<i64> for Time<T>{
type Output=Self;
#[inline]
fn mul(self,rhs:i64)->Self::Output{
Self::raw(self.0*rhs)
macro_rules! impl_time_i64_rhs_operator {
($op:ident,$method:ident)=>{
impl<T> core::ops::$op<i64> for Time<T>{
type Output=Self;
#[inline]
fn $method(self,rhs:i64)->Self::Output{
Self::raw(self.0.$method(rhs))
}
}
}
}
impl_time_i64_rhs_operator!(Div,div);
impl_time_i64_rhs_operator!(Mul,mul);
impl_time_i64_rhs_operator!(Shr,shr);
impl_time_i64_rhs_operator!(Shl,shl);
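// Div, Mul, Shr and Shl by a plain i64 all operate directly on the raw nanosecond count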
impl<T> core::ops::Mul<Time<T>> for Planar64{
type Output=Ratio<Fixed<2,64>,Planar64>;
fn mul(self,rhs:Time<T>)->Self::Output{
@@ -155,7 +156,7 @@ impl<T> core::ops::Mul<Time<T>> for Planar64{
#[cfg(test)]
mod test_time{
use super::*;
type Time=super::AbsoluteTime;
type Time=AbsoluteTime;
#[test]
fn time_from_planar64(){
let a:Time=Planar64::from(1).into();
@@ -551,7 +552,7 @@ impl TryFrom<[f32;3]> for Unit32Vec3{
}
*/
pub type Planar64TryFromFloatError=fixed_wide::fixed::FixedFromFloatError;
pub type Planar64TryFromFloatError=FixedFromFloatError;
pub type Planar64=fixed_wide::types::I32F32;
pub type Planar64Vec3=linear_ops::types::Vector3<Planar64>;
pub type Planar64Mat3=linear_ops::types::Matrix3<Planar64>;
@@ -560,12 +561,6 @@ pub mod vec3{
pub use linear_ops::types::Vector3;
pub const MIN:Planar64Vec3=Planar64Vec3::new([Planar64::MIN;3]);
pub const MAX:Planar64Vec3=Planar64Vec3::new([Planar64::MAX;3]);
pub const ZERO:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO;3]);
pub const ZERO_2:linear_ops::types::Vector3<Fixed::<2,64>>=linear_ops::types::Vector3::new([Fixed::<2,64>::ZERO;3]);
pub const ZERO_3:linear_ops::types::Vector3<Fixed::<3,96>>=linear_ops::types::Vector3::new([Fixed::<3,96>::ZERO;3]);
pub const ZERO_4:linear_ops::types::Vector3<Fixed::<4,128>>=linear_ops::types::Vector3::new([Fixed::<4,128>::ZERO;3]);
pub const ZERO_5:linear_ops::types::Vector3<Fixed::<5,160>>=linear_ops::types::Vector3::new([Fixed::<5,160>::ZERO;3]);
pub const ZERO_6:linear_ops::types::Vector3<Fixed::<6,192>>=linear_ops::types::Vector3::new([Fixed::<6,192>::ZERO;3]);
pub const X:Planar64Vec3=Planar64Vec3::new([Planar64::ONE,Planar64::ZERO,Planar64::ZERO]);
pub const Y:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ONE,Planar64::ZERO]);
pub const Z:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ZERO,Planar64::ONE]);
@@ -574,6 +569,10 @@ pub mod vec3{
pub const NEG_Y:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::NEG_ONE,Planar64::ZERO]);
pub const NEG_Z:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ZERO,Planar64::NEG_ONE]);
pub const NEG_ONE:Planar64Vec3=Planar64Vec3::new([Planar64::NEG_ONE,Planar64::NEG_ONE,Planar64::NEG_ONE]);
// TODO: use #![feature(generic_const_items)] when stabilized https://github.com/rust-lang/rust/issues/113521
pub const fn zero<const N:usize,const F:usize>()->Vector3<Fixed<N,F>>{
Vector3::new([Fixed::ZERO;3])
}
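// replaces the removed ZERO and ZERO_2..ZERO_6 constants; N and F are inferred at the call site, e.g. vec3::zero()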
#[inline]
pub const fn int(x:i32,y:i32,z:i32)->Planar64Vec3{
Planar64Vec3::new([Planar64::raw((x as i64)<<32),Planar64::raw((y as i64)<<32),Planar64::raw((z as i64)<<32)])
@@ -656,13 +655,13 @@ pub mod mat3{
}
}
#[derive(Clone,Copy,Default,Hash,Eq,PartialEq)]
#[derive(Clone,Copy,Debug,Default,Hash,Eq,PartialEq)]
pub struct Planar64Affine3{
pub matrix3:Planar64Mat3,//includes scale above 1
pub translation:Planar64Vec3,
}
impl Planar64Affine3{
pub const IDENTITY:Self=Self::new(mat3::identity(),vec3::ZERO);
pub const IDENTITY:Self=Self::new(mat3::identity(),vec3::zero());
#[inline]
pub const fn new(matrix3:Planar64Mat3,translation:Planar64Vec3)->Self{
Self{matrix3,translation}


@@ -1,6 +1,6 @@
use std::collections::HashMap;
use crate::integer::{Planar64Vec3,Planar64Affine3};
use crate::integer::{Planar64,Planar64Vec3,Planar64Affine3};
use crate::gameplay_attributes;
pub type TextureCoordinate=glam::Vec2;
@@ -168,6 +168,11 @@ impl MeshBuilder{
}
}
pub fn acquire_pos_id(&mut self,pos:Planar64Vec3)->PositionId{
// Truncate the 16 most precise bits of the vertex positions.
// This allows the normal vectors to exactly represent the face.
// Remove this in Mesh V2
const MASK:Planar64=Planar64::raw(!((1<<16)-1));
let pos=pos.map(|c|c&MASK);
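// with 32 fractional bits this snaps each coordinate to a multiple of 2^-16, rounding toward negative infinity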
*self.pos_id_from.entry(pos).or_insert_with(||{
let pos_id=PositionId::new(self.unique_pos.len() as u32);
self.unique_pos.push(pos);


@@ -1,6 +1,6 @@
[package]
name = "strafesnet_deferred_loader"
version = "0.5.0"
version = "0.5.1"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -10,4 +10,7 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true


@@ -2,7 +2,7 @@ use std::collections::HashMap;
use crate::loader::Loader;
use crate::mesh::Meshes;
use crate::texture::{RenderConfigs,Texture};
use strafesnet_common::model::{Mesh,MeshId,RenderConfig,RenderConfigId,TextureId};
use strafesnet_common::model::{MeshId,RenderConfig,RenderConfigId,TextureId};
#[derive(Clone,Copy,Debug)]
pub enum LoadFailureMode{
@@ -93,7 +93,7 @@ impl<H:core::hash::Hash+Eq> MeshDeferredLoader<H>{
pub fn into_indices(self)->impl Iterator<Item=H>{
self.mesh_id_from_asset_id.into_keys()
}
pub fn into_meshes<'a,L:Loader<Resource=Mesh,Index<'a>=H>+'a>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes,L::Error>{
pub fn into_meshes<'a,M:Clone,L:Loader<Resource=M,Index<'a>=H>+'a>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes<M>,L::Error>{
let mut mesh_list=vec![None;self.mesh_id_from_asset_id.len()];
for (index,mesh_id) in self.mesh_id_from_asset_id{
let resource_result=loader.load(index);


@@ -4,5 +4,5 @@ pub trait Loader{
type Error:Error;
type Index<'a> where Self:'a;
type Resource;
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>;
}


@@ -1,15 +1,15 @@
use strafesnet_common::model::{Mesh,MeshId};
use strafesnet_common::model::MeshId;
pub struct Meshes{
meshes:Vec<Option<Mesh>>,
pub struct Meshes<M>{
meshes:Vec<Option<M>>,
}
impl Meshes{
pub(crate) const fn new(meshes:Vec<Option<Mesh>>)->Self{
impl<M> Meshes<M>{
pub(crate) const fn new(meshes:Vec<Option<M>>)->Self{
Self{
meshes,
}
}
pub fn consume(self)->impl Iterator<Item=(MeshId,Mesh)>{
pub fn consume(self)->impl Iterator<Item=(MeshId,M)>{
self.meshes.into_iter().enumerate().filter_map(|(mesh_id,maybe_mesh)|
maybe_mesh.map(|mesh|(MeshId::new(mesh_id as u32),mesh))
)


@@ -1,6 +1,6 @@
[package]
name = "fixed_wide"
version = "0.2.0"
version = "0.2.1"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -18,3 +18,6 @@ bnum = "0.13.0"
arrayvec = { version = "0.7.6", optional = true }
paste = "1.0.15"
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
[lints]
workspace = true


@@ -1,6 +1,8 @@
use bnum::{BInt,cast::As};
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
const BNUM_DIGIT_WIDTH:usize=64;
#[derive(Clone,Copy,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
/// A Fixed point number for which multiply operations widen the bits in the output. (when the wide-mul feature is enabled)
/// N is the number of u64s to use
/// F is the number of fractional bits (always N*32 lol)
@@ -68,6 +70,34 @@ impl<const N:usize,const F:usize> Fixed<N,F>{
pub const fn midpoint(self,other:Self)->Self{
Self::from_bits(self.bits.midpoint(other.bits))
}
#[inline]
pub const fn min(self,other:Self)->Self{
Self::from_bits(self.bits.min(other.bits))
}
#[inline]
pub const fn max(self,other:Self)->Self{
Self::from_bits(self.bits.max(other.bits))
}
/// return the result of self*sign(other)
#[inline]
pub const fn mul_sign<const N1:usize,const F1:usize>(self,other:Fixed<N1,F1>)->Self{
if other.is_negative(){
Self::from_bits(self.bits.neg())
}else if other.is_zero(){
Fixed::ZERO
}else{
self
}
}
/// return the result of self/sign(other) (divide by zero does not change the sign)
#[inline]
pub const fn div_sign<const N1:usize,const F1:usize>(self,other:Fixed<N1,F1>)->Self{
if other.is_negative(){
Self::from_bits(self.bits.neg())
}else{
self
}
}
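// x.mul_sign(d) is x when d>0, -x when d<0 and zero when d==0 (i.e. x*signum(d) without a multiply);
// x.div_sign(d) behaves the same except that d==0 leaves x unchanged.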
}
impl<const F:usize> Fixed<1,F>{
/// My old code called this function everywhere so let's provide it
@@ -99,28 +129,6 @@ impl_from!(
i8,i16,i32,i64,i128,isize
);
impl<const N:usize,const F:usize,T> PartialEq<T> for Fixed<N,F>
where
T:Copy,
BInt::<N>:From<T>,
{
#[inline]
fn eq(&self,&other:&T)->bool{
self.bits.eq(&other.into())
}
}
impl<const N:usize,const F:usize,T> PartialOrd<T> for Fixed<N,F>
where
T:Copy,
BInt::<N>:From<T>,
{
#[inline]
fn partial_cmp(&self,&other:&T)->Option<std::cmp::Ordering>{
self.bits.partial_cmp(&other.into())
}
}
impl<const N:usize,const F:usize> std::ops::Neg for Fixed<N,F>{
type Output=Self;
#[inline]
@@ -233,6 +241,11 @@ impl FixedFromFloatError{
}
}
}
impl core::fmt::Display for FixedFromFloatError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
macro_rules! impl_from_float {
( $decode:ident, $input: ty, $mantissa_bits:expr ) => {
impl<const N:usize,const F:usize> TryFrom<$input> for Fixed<N,F>{
@@ -281,6 +294,23 @@ macro_rules! impl_from_float {
impl_from_float!(integer_decode_f32,f32,24);
impl_from_float!(integer_decode_f64,f64,53);
impl<const N:usize,const F:usize> core::fmt::Debug for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{
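// hex rendering of |self|: integral part, '.', then the F-bit fraction with its leading zero nibbles written out; a leading '-' marks negative values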
let integral=self.as_bits().unsigned_abs()>>F;
let fractional=self.as_bits().unsigned_abs()&((bnum::BUint::<N>::ONE<<F)-bnum::BUint::<N>::ONE);
let leading_zeroes=(fractional.leading_zeros() as usize).saturating_sub(N*BNUM_DIGIT_WIDTH-F)>>2;
if self.is_negative(){
core::write!(f,"-")?;
}
if fractional.is_zero(){
core::write!(f,"{integral:x}.{}","0".repeat(leading_zeroes))
}else{
core::write!(f,"{integral:x}.{}{fractional:x}","0".repeat(leading_zeroes))
}
}
}
impl<const N:usize,const F:usize> core::fmt::Display for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{
@@ -304,16 +334,6 @@ macro_rules! impl_additive_operator {
self.$method(other)
}
}
impl<const N:usize,const F:usize,U> core::ops::$trait<U> for $struct<N,F>
where
BInt::<N>:From<U>,
{
type Output = $output;
#[inline]
fn $method(self, other: U) -> Self::Output {
Self::from_bits(self.bits.$method(BInt::<N>::from(other).shl(F as u32)))
}
}
};
}
macro_rules! impl_additive_assign_operator {
@@ -324,15 +344,6 @@ macro_rules! impl_additive_assign_operator {
self.bits.$method(other.bits);
}
}
impl<const N:usize,const F:usize,U> core::ops::$trait<U> for $struct<N,F>
where
BInt::<N>:From<U>,
{
#[inline]
fn $method(&mut self, other: U) {
self.bits.$method(BInt::<N>::from(other).shl(F as u32));
}
}
};
}


@@ -229,3 +229,16 @@ fn test_zeroes_deferred_division(){
])
);
}
#[test]
fn test_debug(){
assert_eq!(format!("{:?}",I32F32::EPSILON),"0.00000001");
assert_eq!(format!("{:?}",I32F32::ONE),"1.00000000");
assert_eq!(format!("{:?}",I32F32::TWO),"2.00000000");
assert_eq!(format!("{:?}",I32F32::MAX),"7fffffff.ffffffff");
assert_eq!(format!("{:?}",I32F32::try_from(core::f64::consts::PI).unwrap()),"3.243f6a88");
assert_eq!(format!("{:?}",I32F32::NEG_EPSILON),"-0.00000001");
assert_eq!(format!("{:?}",I32F32::NEG_ONE),"-1.00000000");
assert_eq!(format!("{:?}",I32F32::NEG_TWO),"-2.00000000");
assert_eq!(format!("{:?}",I32F32::MIN),"-80000000.00000000");
}


@@ -20,3 +20,6 @@ paste = { version = "1.0.15", optional = true }
[dev-dependencies]
fixed_wide = { path = "../fixed_wide", registry = "strafesnet", features = ["wide-mul"] }
[lints]
workspace = true


@@ -205,7 +205,8 @@ macro_rules! impl_matrix_named_fields_shape {
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr=&self.array as *const [[T;$size_inner];$size_outer] as *const Self::Target;
let ptr:*const [[T;$size_inner];$size_outer]=&self.array;
let ptr=ptr as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
}
@@ -214,7 +215,8 @@ macro_rules! impl_matrix_named_fields_shape {
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr=&mut self.array as *mut [[T;$size_inner];$size_outer] as *mut Self::Target;
let ptr:*mut [[T;$size_inner];$size_outer]=&mut self.array;
let ptr=ptr as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
}


@@ -331,7 +331,8 @@ macro_rules! impl_vector_named_fields {
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr=&self.array as *const [T;$size] as *const Self::Target;
let ptr:*const [T;$size]=&self.array;
let ptr=ptr as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
}
@@ -340,7 +341,8 @@ macro_rules! impl_vector_named_fields {
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr=&mut self.array as *mut [T;$size] as *mut Self::Target;
let ptr:*mut [T;$size]=&mut self.array;
let ptr=ptr as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
}


@@ -8,3 +8,6 @@ description = "Ratio operations using trait bounds for avoiding division like th
authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
[lints]
workspace = true


@@ -1,6 +1,6 @@
[package]
name = "strafesnet_rbx_loader"
version = "0.6.0"
version = "0.7.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -12,13 +12,17 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
bytemuck = "1.14.3"
glam = "0.30.0"
lazy-regex = "3.1.0"
rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
rbx_mesh = "0.3.1"
regex = { version = "1.11.3", default-features = false }
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_mesh = "0.5.0"
rbx_reflection = "5.0.0"
rbx_reflection_database = "1.0.0"
rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
roblox_emulator = { version = "0.5.0", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
roblox_emulator = { version = "0.5.1", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
[lints]
workspace = true

245
lib/rbx_loader/src/error.rs Normal file
View File

@@ -0,0 +1,245 @@
use std::collections::HashSet;
use std::num::ParseIntError;
use strafesnet_common::gameplay_modes::{StageId,ModeId};
use strafesnet_common::integer::{FixedFromFloatError,Planar64TryFromFloatError};
/// A collection of errors which can be ignored at your peril
#[derive(Debug,Default)]
pub struct RecoverableErrors{
/// A basepart has an invalid / missing property.
pub basepart_property:Vec<InstancePath>,
/// A part has an unconvertible CFrame.
pub basepart_cframe:Vec<CFrameError>,
/// A part has an unconvertible Velocity.
pub basepart_velocity:Vec<Planar64ConvertError>,
/// A part has an invalid / missing property.
pub part_property:Vec<InstancePath>,
/// A part has an invalid shape.
pub part_shape:Vec<ShapeError>,
/// A meshpart has an invalid / missing property.
pub meshpart_property:Vec<InstancePath>,
/// A meshpart has no mesh.
pub meshpart_content:Vec<InstancePath>,
/// A basepart has an unsupported subclass.
pub unsupported_class:HashSet<String>,
/// A decal has an invalid / missing property.
pub decal_property:Vec<InstancePath>,
/// A decal has an invalid normal_id.
pub normal_id:Vec<NormalIdError>,
/// A texture has an invalid / missing property.
pub texture_property:Vec<InstancePath>,
/// A mode_id failed to parse.
pub mode_id_parse_int:Vec<ParseIntContext>,
/// There is a duplicate mode.
pub duplicate_mode:HashSet<ModeId>,
/// A stage_id failed to parse.
pub stage_id_parse_int:Vec<ParseIntContext>,
/// A Stage was duplicated leading to undefined behaviour.
pub duplicate_stage:HashSet<DuplicateStageError>,
/// A WormholeOut id failed to parse.
pub wormhole_out_id_parse_int:Vec<ParseIntContext>,
/// A WormholeOut was duplicated leading to undefined behaviour.
pub duplicate_wormhole_out:HashSet<u32>,
/// A WormholeIn id failed to parse.
pub wormhole_in_id_parse_int:Vec<ParseIntContext>,
/// A jump limit failed to parse.
pub jump_limit_parse_int:Vec<ParseIntContext>,
}
impl RecoverableErrors{
pub fn count(&self)->usize{
self.basepart_property.len()+
self.basepart_cframe.len()+
self.basepart_velocity.len()+
self.part_property.len()+
self.part_shape.len()+
self.meshpart_property.len()+
self.meshpart_content.len()+
self.unsupported_class.len()+
self.decal_property.len()+
self.normal_id.len()+
self.texture_property.len()+
self.mode_id_parse_int.len()+
self.duplicate_mode.len()+
self.stage_id_parse_int.len()+
self.duplicate_stage.len()+
self.wormhole_out_id_parse_int.len()+
self.duplicate_wormhole_out.len()+
self.wormhole_in_id_parse_int.len()+
self.jump_limit_parse_int.len()
}
}
fn write_comma_separated<T>(
f:&mut std::fmt::Formatter<'_>,
mut it:impl Iterator<Item=T>,
custom_write:impl Fn(&mut std::fmt::Formatter<'_>,T)->std::fmt::Result
)->std::fmt::Result{
if let Some(t)=it.next(){
custom_write(f,t)?;
for t in it{
write!(f,", ")?;
custom_write(f,t)?;
}
}
Ok(())
}
macro_rules! write_instance_path_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,InstancePath(path)|
write!(f,"{path}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_duplicate_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} duplicates: ")?;
write_comma_separated($f,$self.$field.iter(),|f,id|
write!(f,"{id}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_bespoke_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal,$path_field:ident,$error_field:ident)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,context|
write!(f,"{} ({})",context.$path_field,context.$error_field)
)?;
writeln!($f)?;
}
};
}
impl core::fmt::Display for RecoverableErrors{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write_instance_path_error!(f,self,basepart_property,"BasePart is","BaseParts are","missing a property");
write_bespoke_error!(f,self,basepart_cframe,"BasePart","BaseParts","CFrame float convert failed",path,error);
write_bespoke_error!(f,self,basepart_velocity,"BasePart","BaseParts","Velocity float convert failed",path,error);
write_instance_path_error!(f,self,part_property,"Part is","Parts are","missing a property");
write_bespoke_error!(f,self,part_shape,"Part","Parts","Shape is invalid",path,shape);
write_instance_path_error!(f,self,meshpart_property,"MeshPart is","MeshParts are","missing a property");
write_instance_path_error!(f,self,meshpart_content,"MeshPart has","MeshParts have","no mesh");
{
let len=self.unsupported_class.len();
if len!=0{
let plural=if len==1{"Class is"}else{"Classes are"};
write!(f,"The following {plural} not supported: ")?;
write_comma_separated(f,self.unsupported_class.iter(),|f,classname|write!(f,"{classname}"))?;
writeln!(f)?;
}
}
write_instance_path_error!(f,self,decal_property,"Decal is","Decals are","missing a property");
write_bespoke_error!(f,self,normal_id,"Decal","Decals","NormalId is invalid",path,normal_id);
write_instance_path_error!(f,self,texture_property,"Texture is","Textures are","missing a property");
write_bespoke_error!(f,self,mode_id_parse_int,"ModeId","ModeIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_mode,"ModeId has","ModeIds have");
write_bespoke_error!(f,self,stage_id_parse_int,"StageId","StageIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_stage,"StageId has","StageIds have");
write_bespoke_error!(f,self,wormhole_out_id_parse_int,"WormholeOutId","WormholeOutIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_wormhole_out,"WormholeOutId has","WormholeOutIds have");
write_bespoke_error!(f,self,wormhole_in_id_parse_int,"WormholeInId","WormholeInIds","failed to parse",context,error);
write_bespoke_error!(f,self,jump_limit_parse_int,"jump limit","jump limits","failed to parse",context,error);
Ok(())
}
}
/// The full dot-separated path of an Instance within the DOM
#[derive(Debug)]
pub struct InstancePath(pub String);
impl core::fmt::Display for InstancePath{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
self.0.fmt(f)
}
}
impl InstancePath{
pub fn new(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->InstancePath{
let mut names:Vec<_>=core::iter::successors(
Some(instance),
|i|dom.get_by_ref(i.parent())
).map(
|i|i.name.as_str()
).collect();
// discard the name of the root object
names.pop();
names.reverse();
InstancePath(names.join("."))
}
}
#[derive(Debug)]
pub struct ParseIntContext{
pub context:String,
pub error:ParseIntError,
}
impl ParseIntContext{
pub fn parse<T:core::str::FromStr<Err=ParseIntError>>(input:&str)->Result<T,Self>{
input.parse().map_err(|error|ParseIntContext{
context:input.to_owned(),
error,
})
}
}
#[derive(Debug)]
pub struct NormalIdError{
pub path:InstancePath,
pub normal_id:u32,
}
#[derive(Debug)]
pub struct ShapeError{
pub path:InstancePath,
pub shape:u32,
}
#[derive(Debug)]
pub enum CFrameErrorType{
ZeroDeterminant,
Convert(FixedFromFloatError),
}
impl core::fmt::Display for CFrameErrorType{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
#[derive(Debug)]
pub struct CFrameError{
pub path:InstancePath,
pub error:CFrameErrorType,
}
#[derive(Debug)]
pub struct Planar64ConvertError{
pub path:InstancePath,
pub error:Planar64TryFromFloatError,
}
#[derive(Debug,Hash,Eq,PartialEq)]
pub struct DuplicateStageError{
pub mode_id:ModeId,
pub stage_id:StageId,
}
impl core::fmt::Display for DuplicateStageError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{}-Spawn{}",self.mode_id,self.stage_id.get())
}
}
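A self-contained sketch of the accumulate-and-report style this new module enables: parse failures keep their context instead of aborting the load, and the collection is reported once at the end. `Errors` and `parse_with_context` are illustrative stand-ins; only the `ParseIntContext` shape is copied from the file above so the example runs on its own.

```rust
use std::num::ParseIntError;

// Local copy of the ParseIntContext shape defined above.
#[derive(Debug)]
struct ParseIntContext {
    context: String,
    error: ParseIntError,
}

fn parse_with_context(input: &str) -> Result<u32, ParseIntContext> {
    input.parse().map_err(|error| ParseIntContext {
        context: input.to_owned(),
        error,
    })
}

#[derive(Debug, Default)]
struct Errors {
    stage_id_parse_int: Vec<ParseIntContext>,
}

fn main() {
    let mut errors = Errors::default();
    let mut stages = Vec::new();
    for name in ["Spawn1", "Spawn2", "Spawnx"] {
        // Strip the "Spawn" prefix and try to read the stage number.
        match parse_with_context(&name["Spawn".len()..]) {
            Ok(stage_id) => stages.push(stage_id),
            // Recoverable: remember the failure and keep loading.
            Err(e) => errors.stage_id_parse_int.push(e),
        }
    }
    assert_eq!(stages, [1, 2]);
    assert_eq!(errors.stage_id_parse_int.len(), 1);
    for e in &errors.stage_id_parse_int {
        println!("StageId failed to parse: {} ({})", e.context, e.error);
    }
}
```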

View File

@@ -1,13 +1,18 @@
use std::io::Read;
use rbx_dom_weak::WeakDom;
use roblox_emulator::context::Context;
use strafesnet_common::map::CompleteMap;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
pub use error::RecoverableErrors;
pub use roblox_emulator::runner::Error as RunnerError;
mod rbx;
mod mesh;
mod error;
mod union;
pub mod loader;
mod primitives;
pub mod primitives;
pub mod data{
pub struct RobloxMeshBytes(Vec<u8>);
@@ -28,7 +33,7 @@ impl Model{
fn new(dom:WeakDom)->Self{
Self{dom}
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
to_snf(self,failure_mode)
}
}
@@ -48,18 +53,20 @@ impl Place{
context,
})
}
pub fn run_scripts(&mut self){
pub fn run_scripts(&mut self)->Result<Vec<RunnerError>,RunnerError>{
let Place{context}=self;
let runner=roblox_emulator::runner::Runner::new().unwrap();
let runner=roblox_emulator::runner::Runner::new()?;
let scripts=context.scripts();
let runnable=runner.runnable_context(context).unwrap();
let runnable=runner.runnable_context(context)?;
let mut errors=Vec::new();
for script in scripts{
if let Err(e)=runnable.run_script(script){
println!("runner error: {e}");
errors.push(e);
}
}
Ok(errors)
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
to_snf(self,failure_mode)
}
}
@@ -123,7 +130,7 @@ impl From<loader::MeshError> for LoadError{
}
}
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
let dom=dom.as_ref();
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
@@ -143,7 +150,5 @@ fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<strafesn
let mut texture_loader=loader::TextureLoader::new();
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;
let map=map_step2.add_render_configs_and_textures(render_configs);
Ok(map)
Ok(map_step2.add_render_configs_and_textures(render_configs))
}
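A hedged sketch of how a caller might consume the two changed signatures: run_scripts now returns the per-script errors (or a hard runner error) instead of printing them, and to_snf hands back the RecoverableErrors alongside the map. `Place` and `LoadFailureMode` are assumed to already be in scope; this is not runnable outside the crate.

```rust
fn load_place(mut place: Place, failure_mode: LoadFailureMode) {
    // A hard failure (runner construction) is now separated from the
    // per-script errors that the old code printed inline.
    match place.run_scripts() {
        Ok(script_errors) => {
            for e in &script_errors {
                eprintln!("script error: {e}");
            }
        }
        Err(e) => eprintln!("runner error: {e}"),
    }
    // The recoverable errors ride along with the successfully converted map.
    match place.to_snf(failure_mode) {
        Ok((map, recoverable_errors)) => {
            if recoverable_errors.count() != 0 {
                eprintln!("{recoverable_errors}");
            }
            let _map = map; // hand off to the rest of the pipeline
        }
        Err(_) => eprintln!("map conversion failed"),
    }
}
```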

View File

@@ -18,7 +18,6 @@ fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::E
Ok(data)
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -51,7 +50,7 @@ impl Loader for TextureLoader{
type Error=TextureError;
type Index<'a>=&'a str;
type Resource=Texture;
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
let RobloxAssetId(asset_id)=index.parse()?;
let file_name=format!("textures/{}.dds",asset_id);
let data=read_entire_file(file_name)?;
@@ -59,7 +58,6 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -118,7 +116,7 @@ pub struct MeshIndex<'a>{
content:&'a str,
}
impl MeshIndex<'_>{
pub fn file_mesh(content:&str)->MeshIndex{
pub fn file_mesh(content:&str)->MeshIndex<'_>{
MeshIndex{
mesh_type:MeshType::FileMesh,
content,
@@ -143,6 +141,12 @@ impl MeshIndex<'_>{
}
}
#[derive(Clone)]
pub struct MeshWithSize{
pub(crate) mesh:Mesh,
pub(crate) size:strafesnet_common::integer::Planar64Vec3,
}
pub struct MeshLoader;
impl MeshLoader{
pub fn new()->Self{
@@ -152,8 +156,8 @@ impl MeshLoader{
impl Loader for MeshLoader{
type Error=MeshError;
type Index<'a>=MeshIndex<'a>;
type Resource=Mesh;
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
type Resource=MeshWithSize;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
let mesh=match index.mesh_type{
MeshType::FileMesh=>{
let RobloxAssetId(asset_id)=index.content.parse()?;
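The signature change from `fn load<'a>(&mut self,index:Self::Index<'a>)` to the elided `Self::Index<'_>` form is purely cosmetic; with `Index` as a lifetime-generic associated type the two spellings are equivalent. A toy trait mirroring only the associated-type shape visible in these impls (the real `Loader` trait lives in strafesnet_deferred_loader and may differ):

```rust
trait Loader {
    type Error;
    type Index<'a>;
    type Resource;
    // The '_ introduces a fresh lifetime for the borrowed index, exactly as
    // the explicit <'a> version did.
    fn load(&mut self, index: Self::Index<'_>) -> Result<Self::Resource, Self::Error>;
}

struct NumberLoader;

impl Loader for NumberLoader {
    type Error = std::num::ParseIntError;
    type Index<'a> = &'a str;
    type Resource = u32;
    fn load(&mut self, index: Self::Index<'_>) -> Result<u32, Self::Error> {
        index.parse()
    }
}

fn main() {
    let mut loader = NumberLoader;
    assert_eq!(loader.load("42").unwrap(), 42);
}
```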

View File

@@ -1,12 +1,14 @@
use std::collections::HashMap;
use rbx_mesh::mesh::{Vertex2,Vertex2Truncated};
use strafesnet_common::{integer::vec3,model::{self,ColorId,IndexedVertex,NormalId,PolygonGroup,PolygonList,PositionId,RenderConfigId,TextureCoordinateId,VertexId}};
use strafesnet_common::aabb::Aabb;
use strafesnet_common::integer::vec3;
use strafesnet_common::model::{self,ColorId,IndexedVertex,PolygonGroup,PolygonList,RenderConfigId,VertexId};
use crate::loader::MeshWithSize;
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
RbxMesh(rbx_mesh::mesh::Error)
}
impl std::fmt::Display for Error{
@@ -16,69 +18,46 @@ impl std::fmt::Display for Error{
}
impl std::error::Error for Error{}
fn ingest_vertices2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireColorId,
AcquireVertexId,
>(
fn ingest_vertices2(
vertices:Vec<Vertex2>,
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
acquire_color_id:&mut AcquireColorId,
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireColorId:FnMut([f32;4])->ColorId,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
mb:&mut model::MeshBuilder,
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
//while also inserting unique entries into the lists simultaneously
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:acquire_color_id(vertex.color.map(|f|f as f32/255.0f32))
}),
))).collect::<Result<_,_>>()?)
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32))),
};
mb.acquire_vertex_id(vertex)
}
))).collect()
}
fn ingest_vertices_truncated2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireVertexId,
>(
fn ingest_vertices_truncated2(
vertices:Vec<Vertex2Truncated>,
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
mb:&mut model::MeshBuilder,
static_color_id:ColorId,//pick one color and fill everything with it
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
//while also inserting unique entries into the lists simultaneously
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:static_color_id
}),
))).collect::<Result<_,_>>()?)
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:static_color_id,
};
mb.acquire_vertex_id(vertex)
}
))).collect()
}
fn ingest_faces2_lods3(
@@ -89,129 +68,80 @@ fn ingest_faces2_lods3(
){
//faces have to be split into polygon groups based on lod
polygon_groups.extend(lods.windows(2).map(|lod_pair|
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
vec![vertex_id_map[&v0],vertex_id_map[&v1],vertex_id_map[&v2]]
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().filter_map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
Some(vec![*vertex_id_map.get(&v0)?,*vertex_id_map.get(&v1)?,*vertex_id_map.get(&v2)?])
).collect()))
))
}
pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<model::Mesh,Error>{
pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithSize,Error>{
//generate that mesh boi
let mut unique_pos=Vec::new();
let mut pos_id_from=HashMap::new();
let mut unique_tex=Vec::new();
let mut tex_id_from=HashMap::new();
let mut unique_normal=Vec::new();
let mut normal_id_from=HashMap::new();
let mut unique_color=Vec::new();
let mut color_id_from=HashMap::new();
let mut unique_vertices=Vec::new();
let mut vertex_id_from=HashMap::new();
let mut polygon_groups=Vec::new();
let mut acquire_pos_id=|pos|{
let p=vec3::try_from_f32_array(pos).map_err(Error::Planar64Vec3)?;
Ok(PositionId::new(*pos_id_from.entry(p).or_insert_with(||{
let pos_id=unique_pos.len();
unique_pos.push(p);
pos_id
}) as u32))
};
let mut acquire_tex_id=|tex|{
let h=bytemuck::cast::<[f32;2],[u32;2]>(tex);
TextureCoordinateId::new(*tex_id_from.entry(h).or_insert_with(||{
let tex_id=unique_tex.len();
unique_tex.push(glam::Vec2::from_array(tex));
tex_id
}) as u32)
};
let mut acquire_normal_id=|normal|{
let n=vec3::try_from_f32_array(normal).map_err(Error::Planar64Vec3)?;
Ok(NormalId::new(*normal_id_from.entry(n).or_insert_with(||{
let normal_id=unique_normal.len();
unique_normal.push(n);
normal_id
}) as u32))
};
let mut acquire_color_id=|color|{
let h=bytemuck::cast::<[f32;4],[u32;4]>(color);
ColorId::new(*color_id_from.entry(h).or_insert_with(||{
let color_id=unique_color.len();
unique_color.push(glam::Vec4::from_array(color));
color_id
}) as u32)
};
let mut acquire_vertex_id=|vertex:IndexedVertex|{
VertexId::new(*vertex_id_from.entry(vertex.clone()).or_insert_with(||{
let vertex_id=unique_vertices.len();
unique_vertices.push(vertex);
vertex_id
}) as u32)
};
let mut mb=model::MeshBuilder::new();
match rbx_mesh::read_versioned(roblox_mesh_bytes.cursor()).map_err(Error::RbxMesh)?{
rbx_mesh::mesh::VersionedMesh::Version1(mesh)=>{
let color_id=acquire_color_id([1.0f32;4]);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|Ok(acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id([vertex.tex[0],vertex.tex[1]]),
normal:acquire_normal_id(vertex.norm)?,
color:color_id,
}));
Ok(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect::<Result<_,_>>()?)));
rbx_mesh::mesh::Mesh::V1(mesh)=>{
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).filter_map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::vec2(vertex.tex[0],vertex.tex[1])),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:color_id,
};
Some(mb.acquire_vertex_id(vertex))
};
Some(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect())));
},
rbx_mesh::mesh::VersionedMesh::Version2(mesh)=>{
rbx_mesh::mesh::Mesh::V2(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
//pick white and make all the vertices white
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
//one big happy group for all the faces
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|face|
vec![vertex_id_map[&face.0],vertex_id_map[&face.1],vertex_id_map[&face.2]]
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().filter_map(|face|
Some(vec![*vertex_id_map.get(&face.0)?,*vertex_id_map.get(&face.1)?,*vertex_id_map.get(&face.2)?])
).collect())));
},
rbx_mesh::mesh::VersionedMesh::Version3(mesh)=>{
rbx_mesh::mesh::Mesh::V3(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::VersionedMesh::Version4(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
rbx_mesh::mesh::Mesh::V4(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::VersionedMesh::Version5(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
rbx_mesh::mesh::Mesh::V5(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
}
Ok(model::Mesh{
unique_pos,
unique_normal,
unique_tex,
unique_color,
unique_vertices,
let mesh=mb.build(
polygon_groups,
//these should probably be moved to the model...
//but what if models want to use the same texture
graphics_groups:vec![model::IndexedGraphicsGroup{
vec![model::IndexedGraphicsGroup{
render:RenderConfigId::new(0),
//the lowest lod is highest quality
groups:vec![model::PolygonGroupId::new(0)]
}],
//disable physics
physics_groups:Vec::new(),
})
Vec::new(),
);
let mut aabb=Aabb::default();
for &point in &mesh.unique_pos{
aabb.grow(point);
}
Ok(MeshWithSize{mesh,size:aabb.size()})
}
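The removed `acquire_*_id` closures all followed the same interning shape, which `model::MeshBuilder` now provides as methods: keep a Vec of unique values plus a HashMap from value to index, and hand out the existing index on repeats. A self-contained sketch of that shape with an illustrative `Interner` type:

```rust
use std::collections::HashMap;

struct Interner<T> {
    unique: Vec<T>,
    id_from: HashMap<T, u32>,
}

impl<T: Clone + Eq + std::hash::Hash> Interner<T> {
    fn new() -> Self {
        Interner { unique: Vec::new(), id_from: HashMap::new() }
    }
    /// Return the id for `value`, inserting it on first sight.
    fn acquire_id(&mut self, value: T) -> u32 {
        let Interner { unique, id_from } = self;
        *id_from.entry(value.clone()).or_insert_with(|| {
            let id = unique.len() as u32;
            unique.push(value);
            id
        })
    }
}

fn main() {
    // Positions as integer triples, standing in for Planar64Vec3.
    let mut positions = Interner::new();
    let a = positions.acquire_id([0i64, 0, 0]);
    let b = positions.acquire_id([1, 0, 0]);
    let a_again = positions.acquire_id([0, 0, 0]); // deduplicated
    assert_eq!((a, b, a_again), (0, 1, 0));
    assert_eq!(positions.unique.len(), 2);
}
```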

View File

@@ -56,7 +56,7 @@ const CUBE_DEFAULT_TEXTURE_COORDS:[TextureCoordinate;4]=[
TextureCoordinate::new(1.0,1.0),
TextureCoordinate::new(0.0,1.0),
];
const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
pub const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
vec3::int(-1,-1, 1),//0 left bottom back
vec3::int( 1,-1, 1),//1 right bottom back
vec3::int( 1, 1, 1),//2 right top back
@@ -66,7 +66,7 @@ const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
vec3::int( 1,-1,-1),//6 right bottom front
vec3::int(-1,-1,-1),//7 left bottom front
];
const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
pub const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
vec3::int( 1, 0, 0),//CubeFace::Right
vec3::int( 0, 1, 0),//CubeFace::Top
vec3::int( 0, 0, 1),//CubeFace::Back
@@ -121,8 +121,7 @@ impl FaceDescription{
}
}
}
pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Mesh{
const CUBE_DEFAULT_POLYS:[[[u32;2];4];6]=[
pub const CUBE_DEFAULT_POLYS:[[[u32;2];4];6]=[
// right (1, 0, 0)
[
[6,2],//[vertex,tex]
@@ -166,6 +165,7 @@ pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Me
[7,2],
],
];
pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Mesh{
let mut generated_pos=Vec::new();
let mut generated_tex=Vec::new();
let mut generated_normal=Vec::new();
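With these tables now public, other modules can resolve a face entry of CUBE_DEFAULT_POLYS (four [vertex_index, tex_index] pairs) against CUBE_DEFAULT_VERTICES themselves. A small sketch using plain integer triples and only the vertex entries visible in the hunks above; the remaining table contents are not reproduced here:

```rust
fn main() {
    // Vertex indices 0, 1, 2, 6 and 7 as shown in the diff above.
    let vertex = |i: u32| -> [i32; 3] {
        match i {
            0 => [-1, -1,  1], // left bottom back
            1 => [ 1, -1,  1], // right bottom back
            2 => [ 1,  1,  1], // right top back
            6 => [ 1, -1, -1], // right bottom front
            7 => [-1, -1, -1], // left bottom front
            _ => unimplemented!("not shown in the hunk"),
        }
    };
    // The right face begins with the pair [6, 2]: vertex index 6, texture
    // coordinate index 2.
    let [v, _tex] = [6u32, 2u32];
    let corner = vertex(v);
    assert_eq!(corner, [1, -1, -1]);
    assert_eq!(corner[0], 1); // lies on the +X side, i.e. the right face
}
```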

View File

@@ -1,13 +1,13 @@
use std::collections::HashMap;
use crate::loader::MeshIndex;
use crate::error::{RecoverableErrors,CFrameError,CFrameErrorType,DuplicateStageError,InstancePath,NormalIdError,Planar64ConvertError,ParseIntContext,ShapeError};
use crate::loader::{MeshWithSize,MeshIndex};
use crate::primitives::{self,CubeFace,CubeFaceDescription,WedgeFaceDescription,CornerWedgeFaceDescription,FaceDescription,Primitives};
use strafesnet_common::aabb::Aabb;
use strafesnet_common::map;
use strafesnet_common::model;
use strafesnet_common::gameplay_modes::{NormalizedModes,Mode,ModeId,ModeUpdate,ModesBuilder,Stage,StageElement,StageElementBehaviour,StageId,Zone};
use strafesnet_common::gameplay_style;
use strafesnet_common::gameplay_attributes as attr;
use strafesnet_common::integer::{self,vec3,Planar64,Planar64Vec3,Planar64Mat3,Planar64Affine3};
use strafesnet_common::integer::{self,vec3,Planar64TryFromFloatError,Planar64,Planar64Vec3,Planar64Mat3,Planar64Affine3};
use strafesnet_common::model::RenderConfigId;
use strafesnet_deferred_loader::deferred_loader::{RenderConfigDeferredLoader,MeshDeferredLoader};
use strafesnet_deferred_loader::mesh::Meshes;
@@ -18,54 +18,59 @@ fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
}
fn recursive_collect_superclass(
objects:&mut std::vec::Vec<rbx_dom_weak::types::Ref>,
dom:&rbx_dom_weak::WeakDom,
instance:&rbx_dom_weak::Instance,
superclass:&str
){
let instance=instance;
let db=rbx_reflection_database::get();
let Some(superclass)=db.classes.get(superclass)else{
return;
};
objects.extend(
dom.descendants_of(instance.referent()).filter_map(|instance|{
let class=db.classes.get(instance.class.as_str())?;
db.has_superclass(class,superclass).then(||instance.referent())
})
);
macro_rules! lazy_regex{
($r:literal)=>{{
use regex::Regex;
use std::sync::LazyLock;
static RE:LazyLock<Regex>=LazyLock::new(||Regex::new($r).unwrap());
&RE
}};
}
fn planar64_affine3_from_roblox(cf:&rbx_dom_weak::types::CFrame,size:&rbx_dom_weak::types::Vector3)->Planar64Affine3{
Planar64Affine3::new(
fn planar64_affine3_from_roblox(cf:&rbx_dom_weak::types::CFrame,size:&rbx_dom_weak::types::Vector3)->Result<Planar64Affine3,Planar64TryFromFloatError>{
Ok(Planar64Affine3::new(
Planar64Mat3::from_cols([
vec3::try_from_f32_array([cf.orientation.x.x,cf.orientation.y.x,cf.orientation.z.x]).unwrap()
*integer::try_from_f32(size.x/2.0).unwrap(),
vec3::try_from_f32_array([cf.orientation.x.y,cf.orientation.y.y,cf.orientation.z.y]).unwrap()
*integer::try_from_f32(size.y/2.0).unwrap(),
vec3::try_from_f32_array([cf.orientation.x.z,cf.orientation.y.z,cf.orientation.z.z]).unwrap()
*integer::try_from_f32(size.z/2.0).unwrap(),
].map(|t|t.narrow_1().unwrap())),
vec3::try_from_f32_array([cf.position.x,cf.position.y,cf.position.z]).unwrap()
)
(vec3::try_from_f32_array([cf.orientation.x.x,cf.orientation.y.x,cf.orientation.z.x])?
*integer::try_from_f32(size.x/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
(vec3::try_from_f32_array([cf.orientation.x.y,cf.orientation.y.y,cf.orientation.z.y])?
*integer::try_from_f32(size.y/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
(vec3::try_from_f32_array([cf.orientation.x.z,cf.orientation.y.z,cf.orientation.z.z])?
*integer::try_from_f32(size.z/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
]),
vec3::try_from_f32_array([cf.position.x,cf.position.y,cf.position.z])?
))
}
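For orientation: the matrix this function builds scales the CFrame's rotation columns by half the part size, so the canonical [-1, 1] cube maps exactly onto the part before the translation is applied; the rewrite mainly changes the error handling from unwraps to a Result. An f32/glam illustration of the same construction (the real code uses the fixed-point Planar64 types):

```rust
fn main() {
    let size = glam::vec3(4.0, 2.0, 6.0);          // part Size
    let rotation = glam::Mat3::IDENTITY;           // CFrame orientation
    let position = glam::vec3(10.0, 0.0, -5.0);    // CFrame position
    // Columns scaled by size/2, translation taken from the CFrame position.
    let matrix3 = rotation * glam::Mat3::from_diagonal(size / 2.0);
    let affine = glam::Affine3A::from_mat3_translation(matrix3, position);
    // The (+1, +1, +1) corner of the unit cube lands at position + size/2.
    let corner = affine.transform_point3(glam::Vec3::ONE);
    assert_eq!(corner, position + size / 2.0);
}
```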
fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:model::ModelId,modes_builder:&mut ModesBuilder,wormhole_in_model_to_id:&mut HashMap<model::ModelId,u32>,wormhole_id_to_out_model:&mut HashMap<u32,model::ModelId>)->attr::CollisionAttributes{
enum GetAttributesError{
ModeIdParseInt(ParseIntContext),
DuplicateMode(ModeId),
StageIdParseInt(ParseIntContext),
DuplicateStage(DuplicateStageError),
WormholeOutIdParseInt(ParseIntContext),
DuplicateWormholeOut(u32),
WormholeInIdParseInt(ParseIntContext),
JumpLimitParseInt(ParseIntContext),
}
fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:model::ModelId,modes_builder:&mut ModesBuilder,wormhole_in_model_to_id:&mut HashMap<model::ModelId,u32>,wormhole_id_to_out_model:&mut HashMap<u32,model::ModelId>)->Result<attr::CollisionAttributes,GetAttributesError>{
let mut general=attr::GeneralAttributes::default();
let mut intersecting=attr::IntersectingAttributes::default();
let mut contacting=attr::ContactingAttributes::default();
let mut force_can_collide=can_collide;
let mut force_intersecting=false;
let mut allow_booster=true;
match name{
"Water"=>{
force_can_collide=false;
allow_booster=false;
//TODO: read stupid CustomPhysicalProperties
intersecting.water=Some(attr::IntersectingWater{density:Planar64::ONE,viscosity:Planar64::ONE/10,velocity});
},
"Accelerator"=>{
//although the new game supports collidable accelerators, this is a roblox compatibility map loader
force_can_collide=false;
// Accelerator is not allowed to be booster in roblox
allow_booster=false;
general.accelerator=Some(attr::Accelerator{acceleration:velocity});
},
// "UnorderedCheckpoint"=>general.teleport_behaviour=Some(model::TeleportBehaviour::StageElement(attr::StageElement{
@@ -74,17 +79,21 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
// force:false,
// behaviour:model::StageElementBehaviour::Unordered
// })),
"SetVelocity"=>general.trajectory=Some(attr::SetTrajectory::Velocity(velocity)),
"SetVelocity"=>{
allow_booster=false;
general.trajectory=Some(attr::SetTrajectory::Velocity(velocity));
},
"MapStart"=>{
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::MAIN;
modes_builder.insert_mode(
ModeId::MAIN,
mode_id,
Mode::empty(
gameplay_style::StyleModifiers::roblox_bhop(),
model_id
)
).unwrap();
).map_err(|_|GetAttributesError::DuplicateMode(mode_id))?;
},
"MapFinish"=>{
force_can_collide=false;
@@ -118,32 +127,36 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
);
},
other=>{
let regman=lazy_regex::regex!(r"^(BonusStart|WormholeOut)(\d+)$");
let regman=lazy_regex!(r"^(BonusStart|WormholeOut)(\d+)$");
if let Some(captures)=regman.captures(other){
match &captures[1]{
"BonusStart"=>{
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::new(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::ModeIdParseInt)?);
modes_builder.insert_mode(
ModeId::new(captures[2].parse::<u32>().unwrap()),
mode_id,
Mode::empty(
gameplay_style::StyleModifiers::roblox_bhop(),
model_id
)
).unwrap();
).map_err(|_|GetAttributesError::DuplicateMode(mode_id))?;
},
"WormholeOut"=>{
//the PhysicsModelId has to exist for it to be teleported to!
force_intersecting=true;
//this object is not special in strafe client, but the roblox mapping needs to be converted to model id
assert!(wormhole_id_to_out_model.insert(captures[2].parse::<u32>().unwrap(),model_id).is_none(),"Cannot have multiple WormholeOut with same id");
let wormhole_id=ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::WormholeOutIdParseInt)?;
if wormhole_id_to_out_model.insert(wormhole_id,model_id).is_some(){
return Err(GetAttributesError::DuplicateWormholeOut(wormhole_id));
}
},
_=>(),
}
}else if let Some(captures)=lazy_regex::regex!(r"^(Force)?(Spawn|SpawnAt|Trigger|Teleport|Platform)(\d+)$")
}else if let Some(captures)=lazy_regex!(r"^(Force)?(Spawn|SpawnAt|Trigger|Teleport|Platform)(\d+)$")
.captures(other){
force_intersecting=true;
let stage_id=StageId::new(captures[3].parse::<u32>().unwrap());
let stage_id=StageId::new(ParseIntContext::parse(&captures[3]).map_err(GetAttributesError::StageIdParseInt)?);
let stage_element=StageElement::new(
//stage_id:
stage_id,
@@ -155,11 +168,12 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
//behaviour:
match &captures[2]{
"Spawn"=>{
let mode_id=ModeId::MAIN;
modes_builder.insert_stage(
ModeId::MAIN,
mode_id,
stage_id,
Stage::empty(model_id),
).unwrap();
).map_err(|_|GetAttributesError::DuplicateStage(DuplicateStageError{mode_id,stage_id}))?;
//TODO: let denormalize handle this
StageElementBehaviour::SpawnAt
},
@@ -169,7 +183,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
"Trigger"=>{force_can_collide=false;StageElementBehaviour::Trigger},
"Teleport"=>{force_can_collide=false;StageElementBehaviour::Teleport},
"Platform"=>StageElementBehaviour::Platform,
_=>panic!("regex1[2] messed up bad"),
_=>unreachable!("regex1[2] messed up bad"),
},
None
);
@@ -180,7 +194,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
stage_element,
),
);
}else if let Some(captures)=lazy_regex::regex!(r"^(Jump|WormholeIn)(\d+)$")
}else if let Some(captures)=lazy_regex!(r"^(Jump|WormholeIn)(\d+)$")
.captures(other){
match &captures[1]{
"Jump"=>modes_builder.push_mode_update(
@@ -192,30 +206,33 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
StageId::FIRST,
false,
StageElementBehaviour::Check,
Some(captures[2].parse::<u8>().unwrap())
Some(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::JumpLimitParseInt)?)
)
),
),
"WormholeIn"=>{
force_can_collide=false;
force_intersecting=true;
assert!(wormhole_in_model_to_id.insert(model_id,captures[2].parse::<u32>().unwrap()).is_none(),"Impossible");
let wormhole_id=ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::WormholeInIdParseInt)?;
// It is impossible for two different objects to have the same model id
assert!(wormhole_in_model_to_id.insert(model_id,wormhole_id).is_none(),"Impossible");
},
_=>panic!("regex2[1] messed up bad"),
_=>unreachable!("regex2[1] messed up bad"),
}
}else if let Some(captures)=lazy_regex::regex!(r"^Bonus(Finish|Anticheat)(\d+)$")
}else if let Some(captures)=lazy_regex!(r"^Bonus(Finish|Anticheat)(\d+)$")
.captures(other){
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::new(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::ModeIdParseInt)?);
modes_builder.push_mode_update(
ModeId::new(captures[2].parse::<u32>().unwrap()),
mode_id,
ModeUpdate::zone(
model_id,
//zone:
match &captures[1]{
"Finish"=>Zone::Finish,
"Anticheat"=>Zone::Anticheat,
_=>panic!("regex3[1] messed up bad"),
_=>unreachable!("regex3[1] messed up bad"),
},
),
);
@@ -234,10 +251,10 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
}
}
//need some way to skip this
if velocity!=vec3::ZERO{
if allow_booster&&velocity!=vec3::zero(){
general.booster=Some(attr::Booster::Velocity(velocity));
}
match force_can_collide{
Ok(match force_can_collide{
true=>{
match name{
"Bounce"=>contacting.contact_behaviour=Some(attr::ContactingBehaviour::Elastic(u32::MAX)),
@@ -255,7 +272,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
}else{
attr::CollisionAttributes::Decoration
},
}
})
}
#[derive(Clone,Copy)]
@@ -327,12 +344,12 @@ pub struct RobloxFaceTextureDescription{
pub color:glam::Vec4,
pub transform:RobloxTextureTransform,
}
impl core::cmp::PartialEq for RobloxFaceTextureDescription{
impl PartialEq for RobloxFaceTextureDescription{
fn eq(&self,other:&Self)->bool{
self.to_bits().eq(&other.to_bits())
}
}
impl core::cmp::Eq for RobloxFaceTextureDescription{}
impl Eq for RobloxFaceTextureDescription{}
impl core::hash::Hash for RobloxFaceTextureDescription{
fn hash<H:core::hash::Hasher>(&self,state:&mut H){
self.to_bits().hash(state);
@@ -401,21 +418,25 @@ fn get_content_url(content:&rbx_dom_weak::types::Content)->Option<&str>{
}
fn get_texture_description<'a>(
temp_objects:&mut Vec<rbx_dom_weak::types::Ref>,
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
recoverable_errors:&mut RecoverableErrors,
db:&rbx_reflection::ReflectionDatabase,
dom:&'a rbx_dom_weak::WeakDom,
object:&rbx_dom_weak::Instance,
size:&rbx_dom_weak::types::Vector3,
)->RobloxPartDescription{
//use the biggest one and cut it down later...
let mut part_texture_description=RobloxPartDescription::default();
temp_objects.clear();
recursive_collect_superclass(temp_objects,&dom,object,"Decal");
for &mut decal_ref in temp_objects{
let Some(decal)=dom.get_by_ref(decal_ref) else{
println!("Decal get_by_ref failed");
continue;
};
let decal=&db.classes["Decal"];
let decals=object.children().iter().filter_map(|&referent|{
let instance=dom.get_by_ref(referent)?;
db.classes.get(instance.class.as_str()).is_some_and(|class|
db.has_superclass(class,decal)
).then_some(instance)
});
for decal in decals{
// decals should always have these properties,
// but it is not guaranteed by the rbx_dom_weak data structure.
let (
Some(rbx_dom_weak::types::Variant::Content(content)),
Some(rbx_dom_weak::types::Variant::Enum(normalid)),
@@ -427,16 +448,16 @@ fn get_texture_description<'a>(
decal.properties.get(&static_ustr("Color3")),
decal.properties.get(&static_ustr("Transparency")),
)else{
println!("Decal is missing a required property");
recoverable_errors.decal_property.push(InstancePath::new(dom,decal));
continue;
};
let texture_id=match content.value(){
rbx_dom_weak::types::ContentType::Uri(uri)=>Some(uri.as_str()),
_=>None,
};
let texture_id=get_content_url(content);
let render_id=render_config_deferred_loader.acquire_render_config_id(texture_id);
let Ok(cube_face)=normalid.to_u32().try_into()else{
println!("NormalId is invalid");
recoverable_errors.normal_id.push(NormalIdError{
path:InstancePath::new(dom,decal),
normal_id:normalid.to_u32(),
});
continue;
};
let (roblox_texture_color,roblox_texture_transform)=if decal.class=="Texture"{
@@ -473,6 +494,7 @@ fn get_texture_description<'a>(
}
)
}else{
recoverable_errors.texture_property.push(InstancePath::new(dom,decal));
(glam::Vec4::ONE,RobloxTextureTransform::identity())
}
}else{
@@ -526,6 +548,8 @@ pub fn convert<'a>(
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
mesh_deferred_loader:&mut MeshDeferredLoader<MeshIndex<'a>>,
)->PartialMap1<'a>{
let mut recoverable_errors=RecoverableErrors::default();
let mut deferred_models_deferred_attributes=Vec::new();
let mut deferred_unions_deferred_attributes=Vec::new();
let mut primitive_models_deferred_attributes=Vec::new();
@@ -535,63 +559,83 @@ pub fn convert<'a>(
//just going to leave it like this for now instead of reworking the data structures for this whole thing
let textureless_render_group=render_config_deferred_loader.acquire_render_config_id(None);
let mut object_refs=Vec::new();
let mut temp_objects=Vec::new();
recursive_collect_superclass(&mut object_refs, &dom, dom.root(),"BasePart");
for object_ref in object_refs {
if let Some(object)=dom.get_by_ref(object_ref){
if let (
Some(rbx_dom_weak::types::Variant::CFrame(cf)),
Some(rbx_dom_weak::types::Variant::Vector3(size)),
Some(rbx_dom_weak::types::Variant::Vector3(velocity)),
Some(rbx_dom_weak::types::Variant::Float32(transparency)),
Some(rbx_dom_weak::types::Variant::Color3uint8(color3)),
Some(rbx_dom_weak::types::Variant::Bool(can_collide)),
) = (
object.properties.get(&static_ustr("CFrame")),
object.properties.get(&static_ustr("Size")),
object.properties.get(&static_ustr("Velocity")),
object.properties.get(&static_ustr("Transparency")),
object.properties.get(&static_ustr("Color")),
object.properties.get(&static_ustr("CanCollide")),
)
{
let model_transform=planar64_affine3_from_roblox(cf,size);
let db=rbx_reflection_database::get();
let basepart=&db.classes["BasePart"];
let baseparts=dom.descendants().filter(|&instance|
db.classes.get(instance.class.as_str()).is_some_and(|class|
db.has_superclass(class,basepart)
)
);
for object in baseparts{
let (
Some(rbx_dom_weak::types::Variant::CFrame(cf)),
Some(rbx_dom_weak::types::Variant::Vector3(size)),
Some(rbx_dom_weak::types::Variant::Vector3(velocity)),
Some(rbx_dom_weak::types::Variant::Float32(transparency)),
Some(rbx_dom_weak::types::Variant::Color3uint8(color3)),
Some(&rbx_dom_weak::types::Variant::Bool(can_collide)),
) = (
object.properties.get(&static_ustr("CFrame")),
object.properties.get(&static_ustr("Size")),
object.properties.get(&static_ustr("Velocity")),
object.properties.get(&static_ustr("Transparency")),
object.properties.get(&static_ustr("Color")),
object.properties.get(&static_ustr("CanCollide")),
)else{
recoverable_errors.basepart_property.push(InstancePath::new(dom,object));
continue;
};
let model_transform=match planar64_affine3_from_roblox(cf,size){
Ok(model_transform)=>{
if model_transform.matrix3.det().is_zero(){
let mut parent_ref=object.parent();
let mut full_path=object.name.clone();
while let Some(parent)=dom.get_by_ref(parent_ref){
full_path=format!("{}.{}",parent.name,full_path);
parent_ref=parent.parent();
}
println!("Zero determinant CFrame at location {}",full_path);
println!("matrix3:{}",model_transform.matrix3);
recoverable_errors.basepart_cframe.push(CFrameError{
path:InstancePath::new(dom,object),
error:CFrameErrorType::ZeroDeterminant,
});
continue;
}
model_transform
},
Err(e)=>{
recoverable_errors.basepart_cframe.push(CFrameError{
path:InstancePath::new(dom,object),
error:CFrameErrorType::Convert(e),
});
continue;
}
};
//TODO: also detect "CylinderMesh" etc here
let shape=match object.class.as_str(){
"Part"=>if let Some(rbx_dom_weak::types::Variant::Enum(shape))=object.properties.get(&static_ustr("Shape")){
Shape::Primitive(shape.to_u32().try_into().expect("Funky roblox PartType"))
}else{
panic!("Part has no Shape!");
},
"TrussPart"=>Shape::Primitive(Primitives::Cube),
"WedgePart"=>Shape::Primitive(Primitives::Wedge),
"CornerWedgePart"=>Shape::Primitive(Primitives::CornerWedge),
"MeshPart"=>Shape::MeshPart,
"UnionOperation"=>Shape::PhysicsData,
_=>{
println!("Unsupported BasePart ClassName={}; defaulting to cube",object.class);
Shape::Primitive(Primitives::Cube)
}
//TODO: also detect "CylinderMesh" etc here
let shape=match object.class.as_str(){
"Part"|"Seat"|"SpawnLocation"=>{
let Some(rbx_dom_weak::types::Variant::Enum(shape))=object.properties.get(&static_ustr("Shape"))else{
recoverable_errors.part_property.push(InstancePath::new(dom,object));
continue;
};
let Ok(shape)=shape.to_u32().try_into()else{
recoverable_errors.part_shape.push(ShapeError{
path:InstancePath::new(dom,object),
shape:shape.to_u32(),
});
continue;
};
Shape::Primitive(shape)
},
"TrussPart"|"VehicleSeat"=>Shape::Primitive(Primitives::Cube),
"WedgePart"=>Shape::Primitive(Primitives::Wedge),
"CornerWedgePart"=>Shape::Primitive(Primitives::CornerWedge),
"MeshPart"=>Shape::MeshPart,
"UnionOperation"=>Shape::PhysicsData,
"Terrain"=>continue,
_=>{
recoverable_errors.unsupported_class.insert(object.class.as_str().to_owned());
Shape::Primitive(Primitives::Cube)
}
};
let (availability,mesh_id)=match shape{
Shape::Primitive(primitive_shape)=>{
//TODO: TAB TAB
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
let (availability,mesh_id)=match shape{
Shape::Primitive(primitive_shape)=>{
let part_texture_description=get_texture_description(render_config_deferred_loader,&mut recoverable_errors,db,dom,object,size);
//obscure rust syntax "slice pattern"
let RobloxPartDescription([
f0,//Cube::Right
@@ -640,66 +684,83 @@ pub fn convert<'a>(
mesh_id
};
(MeshAvailability::Immediate,mesh_id)
},
Shape::MeshPart=>if let (
Some(rbx_dom_weak::types::Variant::Content(mesh_content)),
Some(rbx_dom_weak::types::Variant::Content(texture_content)),
)=(
// mesh must exist
object.properties.get(&static_ustr("MeshContent")),
// texture is allowed to be none
object.properties.get(&static_ustr("TextureContent")),
){
let mesh_asset_id=get_content_url(mesh_content).expect("MeshPart Mesh is not a Uri");
let texture_asset_id=get_content_url(texture_content);
(
MeshAvailability::DeferredMesh(render_config_deferred_loader.acquire_render_config_id(texture_asset_id)),
mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id)),
)
}else{
panic!("Mesh has no Mesh or Texture");
},
Shape::PhysicsData=>{
let mut content="";
let mut mesh_data:&[u8]=&[];
let mut physics_data:&[u8]=&[];
if let Some(rbx_dom_weak::types::Variant::ContentId(asset_id))=object.properties.get(&static_ustr("AssetId")){
content=asset_id.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("MeshData")){
mesh_data=data.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("PhysicsData")){
physics_data=data.as_ref();
}
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
},
},
Shape::MeshPart=>{
let (
Some(rbx_dom_weak::types::Variant::Content(mesh_content)),
Some(rbx_dom_weak::types::Variant::Content(texture_content)),
)=(
// mesh must exist
object.properties.get(&static_ustr("MeshContent")),
// texture is allowed to be none
object.properties.get(&static_ustr("TextureContent")),
)else{
recoverable_errors.meshpart_property.push(InstancePath::new(dom,object));
continue;
};
let model_deferred_attributes=ModelDeferredAttributes{
mesh:mesh_id,
transform:model_transform,
color:glam::vec4(color3.r as f32/255f32, color3.g as f32/255f32, color3.b as f32/255f32, 1.0-*transparency),
deferred_attributes:GetAttributesArgs{
name:object.name.as_str(),
can_collide:*can_collide,
velocity:vec3::try_from_f32_array([velocity.x,velocity.y,velocity.z]).unwrap(),
},
let mesh_asset_id=match get_content_url(mesh_content){
Some(mesh_asset_id)=>mesh_asset_id,
None=>{
recoverable_errors.meshpart_content.push(InstancePath::new(dom,object));
// Return an empty string which will fail to parse as an asset id
""
}
};
match availability{
MeshAvailability::Immediate=>primitive_models_deferred_attributes.push(model_deferred_attributes),
MeshAvailability::DeferredMesh(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
render,
model:model_deferred_attributes
}),
MeshAvailability::DeferredUnion(part_texture_description)=>deferred_unions_deferred_attributes.push(DeferredUnionDeferredAttributes{
render:part_texture_description,
model:model_deferred_attributes,
}),
let texture_asset_id=get_content_url(texture_content);
(
MeshAvailability::DeferredMesh(render_config_deferred_loader.acquire_render_config_id(texture_asset_id)),
mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id)),
)
},
Shape::PhysicsData=>{
let mut content="";
let mut mesh_data:&[u8]=&[];
let mut physics_data:&[u8]=&[];
if let Some(rbx_dom_weak::types::Variant::ContentId(asset_id))=object.properties.get(&static_ustr("AssetId")){
content=asset_id.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("MeshData")){
mesh_data=data.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("PhysicsData")){
physics_data=data.as_ref();
}
let part_texture_description=get_texture_description(render_config_deferred_loader,&mut recoverable_errors,db,dom,object,size);
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
},
};
let velocity=match vec3::try_from_f32_array([velocity.x,velocity.y,velocity.z]){
Ok(velocity)=>velocity,
Err(e)=>{
recoverable_errors.basepart_velocity.push(Planar64ConvertError{
path:InstancePath::new(dom,object),
error:e,
});
continue;
}
};
let model_deferred_attributes=ModelDeferredAttributes{
mesh:mesh_id,
transform:model_transform,
color:glam::vec4(color3.r as f32/255f32,color3.g as f32/255f32,color3.b as f32/255f32,1.0-*transparency),
deferred_attributes:GetAttributesArgs{
name:object.name.as_str(),
can_collide,
velocity,
},
};
match availability{
MeshAvailability::Immediate=>primitive_models_deferred_attributes.push(model_deferred_attributes),
MeshAvailability::DeferredMesh(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
render,
model:model_deferred_attributes
}),
MeshAvailability::DeferredUnion(part_texture_description)=>deferred_unions_deferred_attributes.push(DeferredUnionDeferredAttributes{
render:part_texture_description,
model:model_deferred_attributes,
}),
}
}
PartialMap1{
@@ -707,25 +768,26 @@ pub fn convert<'a>(
primitive_models_deferred_attributes,
deferred_models_deferred_attributes,
deferred_unions_deferred_attributes,
recoverable_errors,
}
}
struct MeshWithAabb{
mesh:model::Mesh,
aabb:Aabb,
struct MeshIdWithSize{
mesh:model::MeshId,
size:Planar64Vec3,
}
fn acquire_mesh_id_from_render_config_id<'a>(
fn acquire_mesh_id_from_render_config_id(
primitive_meshes:&mut Vec<model::Mesh>,
mesh_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RenderConfigId,model::MeshId>>,
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
loaded_meshes:&HashMap<model::MeshId,MeshWithSize>,
old_mesh_id:model::MeshId,
render:RenderConfigId,
)->Option<(model::MeshId,&'a Aabb)>{
)->Option<MeshIdWithSize>{
//ignore meshes that fail to load completely for now
loaded_meshes.get(&old_mesh_id).map(|mesh_with_aabb|(
*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
loaded_meshes.get(&old_mesh_id).map(|&MeshWithSize{ref mesh,size}|MeshIdWithSize{
mesh:*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
.entry(render).or_insert_with(||{
let mesh_id=model::MeshId::new(primitive_meshes.len() as u32);
let mut mesh_clone=mesh_with_aabb.mesh.clone();
let mut mesh_clone=mesh.clone();
//set the render group lool
if let Some(graphics_group)=mesh_clone.graphics_groups.first_mut(){
graphics_group.render=render;
@@ -733,22 +795,22 @@ fn acquire_mesh_id_from_render_config_id<'a>(
primitive_meshes.push(mesh_clone);
mesh_id
}),
&mesh_with_aabb.aabb,
))
size,
})
}
fn acquire_union_id_from_render_config_id<'a>(
fn acquire_union_id_from_render_config_id(
primitive_meshes:&mut Vec<model::Mesh>,
union_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RobloxPartDescription,model::MeshId>>,
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
loaded_meshes:&HashMap<model::MeshId,MeshWithSize>,
old_union_id:model::MeshId,
part_texture_description:RobloxPartDescription,
)->Option<(model::MeshId,&'a Aabb)>{
)->Option<MeshIdWithSize>{
//ignore unions that fail to load completely for now
loaded_meshes.get(&old_union_id).map(|union_with_aabb|(
*union_id_from_render_config_id.entry(old_union_id).or_insert_with(||HashMap::new())
loaded_meshes.get(&old_union_id).map(|&MeshWithSize{ref mesh,size}|MeshIdWithSize{
mesh:*union_id_from_render_config_id.entry(old_union_id).or_insert_with(||HashMap::new())
.entry(part_texture_description.clone()).or_insert_with(||{
let union_id=model::MeshId::new(primitive_meshes.len() as u32);
let mut union_clone=union_with_aabb.mesh.clone();
let mut union_clone=mesh.clone();
//set the render groups
for (graphics_group,maybe_face_texture_description) in union_clone.graphics_groups.iter_mut().zip(part_texture_description.0){
if let Some(face_texture_description)=maybe_face_texture_description{
@@ -758,19 +820,20 @@ fn acquire_union_id_from_render_config_id<'a>(
primitive_meshes.push(union_clone);
union_id
}),
&union_with_aabb.aabb,
))
size,
})
}
pub struct PartialMap1<'a>{
primitive_meshes:Vec<model::Mesh>,
primitive_models_deferred_attributes:Vec<ModelDeferredAttributes<'a>>,
deferred_models_deferred_attributes:Vec<DeferredModelDeferredAttributes<'a>>,
deferred_unions_deferred_attributes:Vec<DeferredUnionDeferredAttributes<'a>>,
recoverable_errors:RecoverableErrors,
}
impl PartialMap1<'_>{
pub fn add_meshpart_meshes_and_calculate_attributes(
mut self,
meshpart_meshes:Meshes,
meshpart_meshes:Meshes<MeshWithSize>,
)->PartialMap2{
//calculate attributes
let mut modes_builder=ModesBuilder::default();
@@ -782,22 +845,14 @@ impl PartialMap1<'_>{
//decode roblox meshes
//generate mesh_id_map based on meshes that failed to load
let loaded_meshes:HashMap<model::MeshId,MeshWithAabb>=
meshpart_meshes.consume().map(|(old_mesh_id,mesh)|{
let mut aabb=strafesnet_common::aabb::Aabb::default();
for &pos in &mesh.unique_pos{
aabb.grow(pos);
}
(old_mesh_id,MeshWithAabb{
mesh,
aabb,
})
}).collect();
let loaded_meshes:HashMap<model::MeshId,MeshWithSize>=
meshpart_meshes.consume().collect();
// SAFETY: I have no idea what I'm doing and this is definitely unsound in some subtle way
// I just want to chain iterators together man
let aint_no_way=core::cell::UnsafeCell::new(&mut self.primitive_meshes);
let mut model_counter=0;
let mut mesh_id_from_render_config_id=HashMap::new();
let mut union_id_from_render_config_id=HashMap::new();
//now that the meshes are loaded, these models can be generated
@@ -805,23 +860,22 @@ impl PartialMap1<'_>{
self.deferred_models_deferred_attributes.into_iter().flat_map(|deferred_model_deferred_attributes|{
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
//insert into primitive_meshes
let (mesh,aabb)=acquire_mesh_id_from_render_config_id(
let MeshIdWithSize{mesh,size:mesh_size}=acquire_mesh_id_from_render_config_id(
unsafe{*aint_no_way.get()},
&mut mesh_id_from_render_config_id,
&loaded_meshes,
deferred_model_deferred_attributes.model.mesh,
deferred_model_deferred_attributes.render
)?;
let size=aabb.size();
Some(ModelDeferredAttributes{
mesh,
deferred_attributes:deferred_model_deferred_attributes.model.deferred_attributes,
color:deferred_model_deferred_attributes.model.color,
transform:Planar64Affine3::new(
Planar64Mat3::from_cols([
(deferred_model_deferred_attributes.model.transform.matrix3.x_axis*2/size.x).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.y_axis*2/size.y).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.z_axis*2/size.z).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.x_axis*2/mesh_size.x).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.y_axis*2/mesh_size.y).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.z_axis*2/mesh_size.z).divide().narrow_1().unwrap(),
]),
deferred_model_deferred_attributes.model.transform.translation
),
@@ -829,14 +883,13 @@ impl PartialMap1<'_>{
}).chain(self.deferred_unions_deferred_attributes.into_iter().flat_map(|deferred_union_deferred_attributes|{
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
//insert into primitive_meshes
let (mesh,aabb)=acquire_union_id_from_render_config_id(
let MeshIdWithSize{mesh,size}=acquire_union_id_from_render_config_id(
unsafe{*aint_no_way.get()},
&mut union_id_from_render_config_id,
&loaded_meshes,
deferred_union_deferred_attributes.model.mesh,
deferred_union_deferred_attributes.render
)?;
let size=aabb.size();
Some(ModelDeferredAttributes{
mesh,
deferred_attributes:deferred_union_deferred_attributes.model.deferred_attributes,
@@ -852,61 +905,78 @@ impl PartialMap1<'_>{
})
}))
.chain(self.primitive_models_deferred_attributes.into_iter())
.enumerate().map(|(model_id,model_deferred_attributes)|{
let model_id=model::ModelId::new(model_id as u32);
ModelOwnedAttributes{
.filter_map(|model_deferred_attributes|{
let model_id=model::ModelId::new(model_counter);
let attributes=match get_attributes(
&model_deferred_attributes.deferred_attributes.name,
model_deferred_attributes.deferred_attributes.can_collide,
model_deferred_attributes.deferred_attributes.velocity,
model_id,
&mut modes_builder,
&mut wormhole_in_model_to_id,
&mut wormhole_id_to_out_model,
){
Ok(attributes)=>attributes,
Err(e)=>{
match e{
GetAttributesError::ModeIdParseInt(e)=>self.recoverable_errors.mode_id_parse_int.push(e),
GetAttributesError::DuplicateMode(mode_id)=>{self.recoverable_errors.duplicate_mode.insert(mode_id);},
GetAttributesError::StageIdParseInt(e)=>self.recoverable_errors.stage_id_parse_int.push(e),
GetAttributesError::DuplicateStage(duplicate_stage)=>{self.recoverable_errors.duplicate_stage.insert(duplicate_stage);},
GetAttributesError::WormholeOutIdParseInt(e)=>self.recoverable_errors.wormhole_out_id_parse_int.push(e),
GetAttributesError::DuplicateWormholeOut(wormhole_id)=>{self.recoverable_errors.duplicate_wormhole_out.insert(wormhole_id);},
GetAttributesError::WormholeInIdParseInt(e)=>self.recoverable_errors.wormhole_in_id_parse_int.push(e),
GetAttributesError::JumpLimitParseInt(e)=>self.recoverable_errors.jump_limit_parse_int.push(e),
}
return None;
}
};
model_counter+=1;
Some(ModelOwnedAttributes{
mesh:model_deferred_attributes.mesh,
attributes:get_attributes(
&model_deferred_attributes.deferred_attributes.name,
model_deferred_attributes.deferred_attributes.can_collide,
model_deferred_attributes.deferred_attributes.velocity,
model_id,
&mut modes_builder,
&mut wormhole_in_model_to_id,
&mut wormhole_id_to_out_model,
),
attributes,
color:model_deferred_attributes.color,
transform:model_deferred_attributes.transform,
}
})
}).collect();
let models=models_owned_attributes.into_iter().enumerate().map(|(model_id,mut model_owned_attributes)|{
//TODO: TAB
let model_id=model::ModelId::new(model_id as u32);
//update attributes with wormhole id
//TODO: errors/prints
if let Some(wormhole_id)=wormhole_in_model_to_id.get(&model_id){
if let Some(&wormhole_out_model_id)=wormhole_id_to_out_model.get(wormhole_id){
match &mut model_owned_attributes.attributes{
attr::CollisionAttributes::Contact(attr::ContactAttributes{contacting:_,general})
|attr::CollisionAttributes::Intersect(attr::IntersectAttributes{intersecting:_,general})
=>general.wormhole=Some(attr::Wormhole{destination_model:wormhole_out_model_id}),
attr::CollisionAttributes::Decoration=>println!("Not a wormhole"),
let model_id=model::ModelId::new(model_id as u32);
//update attributes with wormhole id
//TODO: errors/prints
if let Some(wormhole_id)=wormhole_in_model_to_id.get(&model_id){
if let Some(&wormhole_out_model_id)=wormhole_id_to_out_model.get(wormhole_id){
match &mut model_owned_attributes.attributes{
attr::CollisionAttributes::Contact(attr::ContactAttributes{contacting:_,general})
|attr::CollisionAttributes::Intersect(attr::IntersectAttributes{intersecting:_,general})
=>general.wormhole=Some(attr::Wormhole{destination_model:wormhole_out_model_id}),
attr::CollisionAttributes::Decoration=>println!("Not a wormhole"),
}
}
}
}
//index the attributes
let attributes_id=if let Some(&attributes_id)=attributes_id_from_attributes.get(&model_owned_attributes.attributes){
attributes_id
}else{
let attributes_id=attr::CollisionAttributesId::new(unique_attributes.len() as u32);
attributes_id_from_attributes.insert(model_owned_attributes.attributes.clone(),attributes_id);
unique_attributes.push(model_owned_attributes.attributes);
attributes_id
};
model::Model{
mesh:model_owned_attributes.mesh,
transform:model_owned_attributes.transform,
color:model_owned_attributes.color,
attributes:attributes_id,
//index the attributes
let attributes_id=if let Some(&attributes_id)=attributes_id_from_attributes.get(&model_owned_attributes.attributes){
attributes_id
}else{
let attributes_id=attr::CollisionAttributesId::new(unique_attributes.len() as u32);
attributes_id_from_attributes.insert(model_owned_attributes.attributes.clone(),attributes_id);
unique_attributes.push(model_owned_attributes.attributes);
attributes_id
};
model::Model{
mesh:model_owned_attributes.mesh,
transform:model_owned_attributes.transform,
color:model_owned_attributes.color,
attributes:attributes_id,
}
}).collect();
PartialMap2{
meshes:self.primitive_meshes,
models,
modes:modes_builder.build_normalized(),
attributes:unique_attributes,
recoverable_errors:self.recoverable_errors,
}
}).collect();
PartialMap2{
meshes:self.primitive_meshes,
models,
modes:modes_builder.build_normalized(),
attributes:unique_attributes,
}
}
}
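
The UnsafeCell workaround above exists because both flat_map closures need mutable access to self.primitive_meshes while being chained into a single iterator. A minimal sketch of one safe alternative, assuming the mesh-acquisition helpers could take a shared &RefCell<Vec<..>> instead of &mut Vec (toy types below, not the crate's real signatures):

use std::cell::RefCell;

fn main(){
    let meshes=RefCell::new(Vec::<String>::new());
    let models:Vec<usize>=(0..3).flat_map(|i|{
        // stands in for acquire_mesh_id_from_render_config_id pushing a mesh
        meshes.borrow_mut().push(format!("model mesh {i}"));
        Some(i)
    }).chain((0..2).flat_map(|i|{
        // stands in for acquire_union_id_from_render_config_id pushing a mesh
        meshes.borrow_mut().push(format!("union mesh {i}"));
        Some(i+10)
    })).collect();
    assert_eq!(models,vec![0,1,2,10,11]);
    assert_eq!(meshes.into_inner().len(),5);
}

Each borrow_mut lasts only for the push, and collect drives the two closures one after the other, so the borrows never overlap at runtime.
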
@@ -915,12 +985,13 @@ pub struct PartialMap2{
models:Vec<model::Model>,
modes:NormalizedModes,
attributes:Vec<strafesnet_common::gameplay_attributes::CollisionAttributes>,
recoverable_errors:RecoverableErrors,
}
impl PartialMap2{
pub fn add_render_configs_and_textures(
self,
render_configs:RenderConfigs,
)->map::CompleteMap{
)->(map::CompleteMap,RecoverableErrors){
let (textures,render_configs)=render_configs.consume();
let (textures,texture_id_map):(Vec<Vec<u8>>,HashMap<model::TextureId,model::TextureId>)
=textures.into_iter().enumerate().map(|(new_texture_id,(old_texture_id,Texture::ImageDDS(texture)))|{
@@ -939,14 +1010,17 @@ impl PartialMap2{
);
render_config
}).collect();
map::CompleteMap{
modes:self.modes,
attributes:self.attributes,
meshes:self.meshes,
models:self.models,
//the roblox legacy texture thing always works
textures,
render_configs,
}
(
map::CompleteMap{
modes:self.modes,
attributes:self.attributes,
meshes:self.meshes,
models:self.models,
//the roblox legacy texture thing always works
textures,
render_configs,
},
self.recoverable_errors,
)
}
}
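
The "index the attributes" step in add_meshpart_meshes_and_calculate_attributes above is a plain interning pass: models with equal attributes share one CollisionAttributesId, and each new value is appended to unique_attributes. A stand-alone sketch of the same pattern, with String standing in for the real attribute type:

use std::collections::HashMap;

// returns (per-model id, deduplicated value table)
fn intern(attrs:Vec<String>)->(Vec<u32>,Vec<String>){
    let mut id_from_value:HashMap<String,u32>=HashMap::new();
    let mut unique=Vec::new();
    let ids=attrs.into_iter().map(|value|{
        *id_from_value.entry(value.clone()).or_insert_with(||{
            let id=unique.len() as u32;
            unique.push(value);
            id
        })
    }).collect();
    (ids,unique)
}

fn main(){
    let (ids,unique)=intern(vec!["bounce".into(),"ice".into(),"bounce".into()]);
    assert_eq!(ids,vec![0,1,0]);
    assert_eq!(unique,vec!["bounce".to_string(),"ice".to_string()]);
}
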

View File

@@ -1,9 +1,12 @@
use rbx_mesh::mesh_data::{NormalId2 as MeshDataNormalId2,VertexId as MeshDataVertexId};
use crate::loader::MeshWithSize;
use crate::rbx::RobloxPartDescription;
use crate::primitives::{CUBE_DEFAULT_VERTICES,CUBE_DEFAULT_POLYS,FaceDescription};
use rbx_mesh::mesh_data::{VertexId as MeshDataVertexId,NormalId as MeshDataNormalId,NormalId2 as MeshDataNormalId2,NormalId5 as MeshDataNormalId5};
use rbx_mesh::physics_data::VertexId as PhysicsDataVertexId;
use strafesnet_common::model::{self,IndexedVertex,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId};
use strafesnet_common::model::{self,IndexedVertex,MeshBuilder,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId,VertexId};
use strafesnet_common::integer::vec3;
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Block,
@@ -21,7 +24,7 @@ impl std::fmt::Display for Error{
// wacky state machine to make sure all vertices in a face agree upon what NormalId to use.
// Roblox duplicates this information per vertex when it should only exist per-face.
enum MeshDataNormalStatus{
Agree(MeshDataNormalId2),
Agree(MeshDataNormalId),
Conflicting,
}
struct MeshDataNormalChecker{
@@ -31,7 +34,7 @@ impl MeshDataNormalChecker{
fn new()->Self{
Self{status:None}
}
fn check(&mut self,normal:MeshDataNormalId2){
fn check(&mut self,normal:MeshDataNormalId){
self.status=match self.status.take(){
None=>Some(MeshDataNormalStatus::Agree(normal)),
Some(MeshDataNormalStatus::Agree(old_normal))=>{
@@ -44,7 +47,7 @@ impl MeshDataNormalChecker{
Some(MeshDataNormalStatus::Conflicting)=>Some(MeshDataNormalStatus::Conflicting),
};
}
fn into_agreed_normal(self)->Option<MeshDataNormalId2>{
fn into_agreed_normal(self)->Option<MeshDataNormalId>{
self.status.and_then(|status|match status{
MeshDataNormalStatus::Agree(normal)=>Some(normal),
MeshDataNormalStatus::Conflicting=>None,
@@ -52,18 +55,127 @@ impl MeshDataNormalChecker{
}
}
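
For a triangle this state machine reduces to: the three per-vertex NormalIds must be identical to yield a per-face normal id, otherwise into_agreed_normal returns None. A tiny stand-alone equivalent, using plain u32 in place of the rbx_mesh id types:

fn agreed_normal(per_vertex:[u32;3])->Option<u32>{
    let [a,b,c]=per_vertex;
    (a==b&&b==c).then_some(a)
}

fn main(){
    assert_eq!(agreed_normal([2,2,2]),Some(2));
    assert_eq!(agreed_normal([2,3,2]),None);
}
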
fn build_mesh2(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::Mesh2,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for vertex in &mesh.vertices{
let p=vertex.pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for [MeshDataVertexId(vertex_id0),MeshDataVertexId(vertex_id1),MeshDataVertexId(vertex_id2)] in mesh.faces{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
let MeshDataNormalId2(normal_id)=vertex.normal_id;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(vertex.pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array())?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
fn build_mesh5(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::CSGMDL5,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for &pos in &mesh.positions{
let p=pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for face in mesh.faces.indices.chunks_exact(3){
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|&vertex_id|{
let vertex_index=vertex_id as usize;
let &pos=mesh.positions.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &MeshDataNormalId5(normal_id)=mesh.normal_ids.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &norm=mesh.normals.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &tex=mesh.tex.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &color=mesh.colors.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array()).map_err(Error::Planar64Vec3)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(norm).map_err(Error::Planar64Vec3)?);
let tex_coord=glam::Vec2::from_array(tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>()?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
const NORMAL_FACES:usize=6;
impl std::error::Error for Error{}
pub fn convert(
roblox_physics_data:&[u8],
roblox_mesh_data:&[u8],
size:glam::Vec3,
crate::rbx::RobloxPartDescription(part_texture_description):crate::rbx::RobloxPartDescription,
)->Result<model::Mesh,Error>{
const NORMAL_FACES:usize=6;
let mut polygon_groups_normal_id=vec![Vec::new();NORMAL_FACES];
RobloxPartDescription(part_texture_description):RobloxPartDescription,
)->Result<MeshWithSize,Error>{
let mut polygon_groups_normal_id:[_;NORMAL_FACES]=[vec![],vec![],vec![],vec![],vec![],vec![]];
// build graphics and physics meshes
let mut mb=strafesnet_common::model::MeshBuilder::new();
let mut mb=MeshBuilder::new();
// graphics
let graphics_groups=if !roblox_mesh_data.is_empty(){
// create per-face texture coordinate affine transforms
@@ -75,46 +187,12 @@ pub fn convert(
let mesh_data=rbx_mesh::read_mesh_data_versioned(
std::io::Cursor::new(roblox_mesh_data)
).map_err(Error::RobloxMeshData)?;
let graphics_mesh=match mesh_data{
match mesh_data{
rbx_mesh::mesh_data::MeshData::CSGK(_)=>return Err(Error::Block),
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL2(mesh_data2))=>mesh_data2.mesh,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL4(mesh_data4))=>mesh_data4.mesh,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V2(mesh_data2))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data2.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V4(mesh_data4))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V5(mesh_data4))=>build_mesh5(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4)?,
};
for [MeshDataVertexId(vertex_id0),MeshDataVertexId(vertex_id1),MeshDataVertexId(vertex_id2)] in graphics_mesh.faces{
let face=[
graphics_mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
graphics_mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
graphics_mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
normal_agreement_checker.check(vertex.normal_id);
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[vertex.normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
(0..NORMAL_FACES).map(|polygon_group_id|{
model::IndexedGraphicsGroup{
render:cube_face_description[polygon_group_id].as_ref().map_or(RenderConfigId::new(0),|face_description|face_description.render),
@@ -126,7 +204,11 @@ pub fn convert(
};
//physics
let physics_convex_meshes=if !roblox_physics_data.is_empty(){
let polygon_groups_normal_it=polygon_groups_normal_id.into_iter().map(|faces|
// graphics polygon groups (to be rendered)
Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
);
let polygon_groups:Vec<PolygonGroup>=if !roblox_physics_data.is_empty(){
let physics_data=rbx_mesh::read_physics_data_versioned(
std::io::Cursor::new(roblox_physics_data)
).map_err(Error::RobloxPhysicsData)?;
@@ -135,44 +217,56 @@ pub fn convert(
// have not seen this format in practice
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Block)
=>return Err(Error::Block),
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Meshes(meshes))
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V3(meshes))
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V5(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V6(meshes))
=>vec![meshes.mesh],
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V7(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::PhysicsInfoMesh(pim))
=>vec![pim.mesh],
};
physics_convex_meshes
let physics_convex_meshes_it=physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// physics polygon groups (to do physics)
Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[PhysicsDataVertexId(vertex_id0),PhysicsDataVertexId(vertex_id1),PhysicsDataVertexId(vertex_id2)]|{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
].map(|v|glam::Vec3::from_slice(v)/size);
let vertex_norm=(face[1]-face[0])
.cross(face[2]-face[0]);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
face.into_iter().map(|vertex_pos|{
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect()
}).collect::<Result<_,_>>()?)))
});
polygon_groups_normal_it.chain(physics_convex_meshes_it).collect::<Result<_,_>>()?
}else{
Vec::new()
};
let polygon_groups:Vec<PolygonGroup>=polygon_groups_normal_id.into_iter().map(|faces|
// graphics polygon groups (to be rendered)
Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
).chain(physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy
let color=mb.acquire_color_id(glam::Vec4::ONE);
// generate a unit cube as default physics
let pos_list=CUBE_DEFAULT_VERTICES.map(|pos|mb.acquire_pos_id(pos>>1));
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// physics polygon groups (to do physics)
Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[PhysicsDataVertexId(vertex_id0),PhysicsDataVertexId(vertex_id1),PhysicsDataVertexId(vertex_id2)]|{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
].map(|v|glam::Vec3::from_slice(v)/size);
let vertex_norm=(face[1]-face[0])
.cross(face[2]-face[0]);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
face.into_iter().map(|vertex_pos|{
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect()
}).collect::<Result<_,_>>()?)))
})).collect::<Result<_,_>>()?;
let normal=mb.acquire_normal_id(vec3::zero());
let color=mb.acquire_color_id(glam::Vec4::ONE);
let polygon_group=PolygonGroup::PolygonList(PolygonList::new(CUBE_DEFAULT_POLYS.map(|poly|poly.map(|[pos_id,_]|
mb.acquire_vertex_id(IndexedVertex{pos:pos_list[pos_id as usize],tex,normal,color})
).to_vec()).to_vec()));
polygon_groups_normal_it.chain([Ok(polygon_group)]).collect::<Result<_,_>>()?
};
let physics_groups=(NORMAL_FACES..polygon_groups.len()).map(|id|model::IndexedPhysicsGroup{
groups:vec![PolygonGroupId::new(id as u32)]
}).collect();
Ok(mb.build(
let mesh=mb.build(
polygon_groups,
graphics_groups,
physics_groups,
))
);
Ok(MeshWithSize{
mesh,
size:vec3::ONE,
})
}

View File

@@ -9,3 +9,6 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
url = "2.5.4"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "roblox_emulator"
version = "0.5.0"
version = "0.5.1"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -13,9 +13,12 @@ run-service=[]
[dependencies]
glam = "0.30.0"
mlua = { version = "0.10.1", features = ["luau"] }
phf = { version = "0.11.2", features = ["macros"] }
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet", features = ["instance-userdata"] }
mlua = { version = "0.11.3", features = ["luau"] }
phf = { version = "0.13.1", features = ["macros"] }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_reflection = "5.0.0"
rbx_reflection_database = "1.0.0"
rbx_types = "2.0.0"
[lints]
workspace = true

View File

@@ -79,7 +79,15 @@ impl Context{
//insert services
let game=dom.root_ref();
let terrain_bldr=InstanceBuilder::new("Terrain");
let terrain_bldr=InstanceBuilder::new("Terrain")
.with_properties([
("CFrame",rbx_dom_weak::types::Variant::CFrame(rbx_dom_weak::types::CFrame::new(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0),rbx_dom_weak::types::Matrix3::identity()))),
("Size",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(1.0,1.0,1.0))),
("Velocity",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0))),
("Transparency",rbx_dom_weak::types::Variant::Float32(0.0)),
("Color",rbx_dom_weak::types::Variant::Color3uint8(rbx_dom_weak::types::Color3uint8::new(255,255,255))),
("CanCollide",rbx_dom_weak::types::Variant::Bool(true)),
]);
let workspace=dom.insert(game,
InstanceBuilder::new("Workspace")
//Set Workspace.Terrain property equal to Terrain

View File

@@ -8,7 +8,7 @@ impl<'a> EnumItem<'a>{
Self{name:Some(name.as_ref()),value}
}
}
impl<'a> From<rbx_types::Enum> for EnumItem<'a>{
impl From<rbx_types::Enum> for EnumItem<'_>{
fn from(e:rbx_types::Enum)->Self{
EnumItem{
name:None,
@@ -65,7 +65,7 @@ impl<'a> EnumItems<'a>{
}
pub enum CoerceEnum<'a>{
Integer(i32),
Integer(i64),
String(mlua::String),
Enum(EnumItem<'a>),
}

View File

@@ -5,12 +5,12 @@ use rbx_types::Ref;
use rbx_dom_weak::{Ustr,InstanceBuilder,WeakDom};
use crate::util::static_ustr;
use crate::runner::vector3::Vector3;
use crate::runner::number::Number;
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
//class functions store
lua.set_app_data(ClassMethodsStore::default());
lua.set_app_data(InstanceValueStore::default());
let table=lua.create_table()?;
@@ -43,7 +43,7 @@ pub fn class_is_a(class:&str,superclass:&str)->bool{
};
db.has_superclass(class,superclass)
}
fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->String{
fn get_full_name(dom:&WeakDom,instance:&rbx_dom_weak::Instance)->String{
let mut full_name=instance.name.clone();
let mut pref=instance.parent();
while let Some(parent)=dom.get_by_ref(pref){
@@ -65,28 +65,28 @@ pub fn get_name_source(lua:&mlua::Lua,script:Instance)->Result<(String,String),m
})
}
pub fn find_first_child<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.name==name)
}
pub fn find_first_descendant<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
dom.descendants_of(instance.referent()).find(|&inst|inst.name==name)
}
pub fn find_first_child_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.class==class)
}
pub fn find_first_descendant_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
dom.descendants_of(instance.referent()).find(|&inst|inst.class==class)
}
pub fn find_first_child_which_is_a<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
let db=rbx_reflection_database::get();
let superclass_descriptor=db.classes.get(superclass)?;
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|{
db.classes.get(inst.class.as_str()).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
})
}
pub fn find_first_descendant_which_is_a<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
let db=rbx_reflection_database::get();
let superclass_descriptor=db.classes.get(superclass)?;
dom.descendants_of(instance.referent()).find(|inst|{
@@ -325,13 +325,16 @@ impl mlua::UserData for Instance{
}
//find or create an associated userdata object
let instance=this.get_mut(dom)?;
if let Some(value)=get_or_create_userdata(instance,lua,index_str)?{
if let Some(value)=instance_value_store_mut(lua,|ivs|{
//TODO: walk class tree somehow
match ivs.get_or_create_instance_values(&instance){
Some(mut instance_values)=>instance_values.get_or_create_value(lua,index_str),
None=>Ok(None)
}
})?{
return value.into_lua(lua);
}
// drop mutable borrow
//find a child with a matching name
let instance=this.get(dom)?;
find_first_child(dom,instance,index_str)
.map(|instance|Instance::new_unchecked(instance.referent()))
.into_lua(lua)
@@ -419,7 +422,7 @@ impl mlua::UserData for Instance{
rbx_types::Variant::CFrame(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::ContentId)=>{
let typed_value=value.as_str().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_owned();
let typed_value=value.as_string().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_str()?.to_owned();
rbx_types::Variant::ContentId(typed_value.into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Ref)=>{
@@ -484,8 +487,8 @@ static CLASS_FUNCTION_DATABASE:CFD=phf::phf_map!{
"GetService"=>GET_SERVICE,
},
"Terrain"=>phf::phf_map!{
"FillBall"=>cf!(|_lua,_,_:(Vector3,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Vector3,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBall"=>cf!(|_lua,_,_:(crate::runner::vector3::Vector3,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,crate::runner::vector3::Vector3,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillCylinder"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Number,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"SetMaterialColor"=>cf!(|_lua,_,_:(crate::runner::r#enum::CoerceEnum,crate::runner::color3::Color3)|mlua::Result::Ok(())),
},
@@ -519,7 +522,7 @@ struct ClassMethodsStore{
}
impl ClassMethodsStore{
/// return self.classes[class] or create the ClassMethods and then return it
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods>{
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods<'_>>{
// Use get_entry to get the &'static str keys of the database
// and use it as a key for the classes hashmap
CLASS_FUNCTION_DATABASE.get_entry(class)
@@ -606,6 +609,8 @@ fn find_virtual_property(
}
// lazy-loaded per-instance userdata values
// This whole thing is a bad idea and a garbage collection nightmare.
// TODO: recreate rbx_dom_weak with my own instance type that owns this data.
type CreateUserData=fn(&mlua::Lua)->mlua::Result<mlua::AnyUserData>;
type LUD=phf::Map<&'static str,// Class name
phf::Map<&'static str,// Value name
@@ -638,22 +643,47 @@ static LAZY_USER_DATA:LUD=phf::phf_map!{
"MouseClick"=>create_script_signal,
},
};
fn get_or_create_userdata(instance:&mut rbx_dom_weak::Instance,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
use std::collections::hash_map::Entry;
let db=rbx_reflection_database::get();
let Some(class)=db.classes.get(instance.class.as_str())else{
return Ok(None)
};
if let Some((&static_str,create_userdata))=db.superclasses_iter(class).find_map(|superclass|
// find pair (class,index)
LAZY_USER_DATA.get(&superclass.name)
.and_then(|map|map.get_entry(index))
){
let index_ustr=static_ustr(static_str);
return Ok(Some(match instance.userdata.entry(index_ustr){
Entry::Occupied(entry)=>entry.get().clone(),
Entry::Vacant(entry)=>entry.insert(create_userdata(lua)?).clone(),
}));
}
Ok(None)
#[derive(Default)]
pub struct InstanceValueStore{
values:HashMap<Ref,
HashMap<&'static str,
mlua::AnyUserData
>
>,
}
pub struct InstanceValues<'a>{
named_values:&'static phf::Map<&'static str,CreateUserData>,
values:&'a mut HashMap<&'static str,mlua::AnyUserData>,
}
impl InstanceValueStore{
pub fn get_or_create_instance_values(&mut self,instance:&rbx_dom_weak::Instance)->Option<InstanceValues<'_>>{
LAZY_USER_DATA.get(instance.class.as_str())
.map(|named_values|
InstanceValues{
named_values,
values:self.values.entry(instance.referent())
.or_insert_with(||HashMap::new()),
}
)
}
}
impl InstanceValues<'_>{
pub fn get_or_create_value(&mut self,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
Ok(match self.named_values.get_entry(index){
Some((&static_index_str,&function_pointer))=>Some(
match self.values.entry(static_index_str){
Entry::Occupied(entry)=>entry.get().clone(),
Entry::Vacant(entry)=>entry.insert(
function_pointer(lua)?
).clone(),
}
),
None=>None,
})
}
}
pub fn instance_value_store_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut InstanceValueStore)->mlua::Result<T>)->mlua::Result<T>{
let mut cf=lua.app_data_mut::<InstanceValueStore>().ok_or_else(||mlua::Error::runtime("InstanceValueStore missing"))?;
f(&mut *cf)
}
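
instance_value_store_mut above leans on mlua's app-data mechanism: a value is attached to the Lua state with set_app_data and borrowed mutably later from any callback via app_data_mut. A minimal sketch of that pattern, assuming the same mlua crate (with a backend feature such as luau enabled); the Counter type is illustrative:

use mlua::Lua;

#[derive(Default)]
struct Counter{hits:u32}

fn counter_mut<T>(lua:&Lua,f:impl FnOnce(&mut Counter)->mlua::Result<T>)->mlua::Result<T>{
    let mut counter=lua.app_data_mut::<Counter>()
        .ok_or_else(||mlua::Error::runtime("Counter missing"))?;
    f(&mut *counter)
}

fn main()->mlua::Result<()>{
    let lua=Lua::new();
    lua.set_app_data(Counter::default());
    counter_mut(&lua,|c|{c.hits+=1;Ok(())})?;
    counter_mut(&lua,|c|{assert_eq!(c.hits,1);Ok(())})
}
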

View File

@@ -4,7 +4,7 @@
#[derive(Clone,Copy)]
pub enum Number{
Integer(i32),
Integer(i64),
Number(f64),
}
macro_rules! impl_ty{

View File

@@ -1,5 +1,4 @@
use crate::context::Context;
use crate::util::static_ustr;
#[cfg(feature="run-service")]
use crate::scheduler::scheduler_mut;
@@ -13,14 +12,12 @@ pub enum Error{
error:mlua::Error
},
RustLua(mlua::Error),
Services(crate::context::ServicesError),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
match self{
Self::Lua{source,error}=>write!(f,"lua error: source:\n{source}\n{error}"),
Self::RustLua(error)=>write!(f,"rust-side lua error: {error}"),
other=>write!(f,"{other:?}"),
}
}
}
@@ -51,6 +48,11 @@ fn init(lua:&mlua::Lua)->mlua::Result<()>{
Ok(())
}
unsafe fn extend_lifetime_mut<'a,T>(src:&mut T)->&'a mut T{
let ptr:*mut T=src;
unsafe{&mut*ptr}
}
impl Runner{
pub fn new()->Result<Self,Error>{
let runner=Self{
@@ -68,8 +70,7 @@ impl Runner{
// SAFETY: This is not a &'static mut WeakDom,
// but as long as Runnable<'a> holds the lifetime of &'a mut Context
// it is a valid unique reference.
let ptr=&mut context.dom as *mut rbx_dom_weak::WeakDom;
self.lua.set_app_data::<crate::context::LuaAppData>(unsafe{&mut*ptr});
self.lua.set_app_data::<crate::context::LuaAppData>(unsafe{extend_lifetime_mut(&mut context.dom)});
#[cfg(feature="run-service")]
self.lua.set_app_data::<crate::scheduler::Scheduler>(crate::scheduler::Scheduler::default());
Ok(Runnable{
@@ -128,15 +129,20 @@ impl Runnable<'_>{
}
#[cfg(feature="run-service")]
pub fn run_service_step(&self)->Result<(),mlua::Error>{
let render_stepped_signal=super::instance::instance::dom_mut(&self.lua,|dom|{
let render_stepped=super::instance::instance::dom_mut(&self.lua,|dom|{
let run_service=super::instance::instance::find_first_child_of_class(dom,dom.root(),"RunService").ok_or_else(||mlua::Error::runtime("RunService missing"))?;
Ok(match run_service.userdata.get(&static_ustr("RenderStepped")){
Some(render_stepped)=>Some(render_stepped.borrow::<super::script_signal::ScriptSignal>()?.clone()),
None=>None
super::instance::instance::instance_value_store_mut(&self.lua,|instance_value_store|{
//I trust find_first_child_of_class to have returned a real RunService above, so the InstanceValues lookup below should not fail
let mut instance_values=instance_value_store.get_or_create_instance_values(run_service).ok_or_else(||mlua::Error::runtime("RunService InstanceValues missing"))?;
let render_stepped=instance_values.get_or_create_value(&self.lua,"RenderStepped")?;
//let stepped=instance_values.get_or_create_value(&self.lua,"Stepped")?;
//let heartbeat=instance_values.get_or_create_value(&self.lua,"Heartbeat")?;
Ok(render_stepped)
})
})?;
if let Some(render_stepped_signal)=render_stepped_signal{
render_stepped_signal.fire(&mlua::MultiValue::new());
if let Some(render_stepped)=render_stepped{
let signal:&super::script_signal::ScriptSignal=&*render_stepped.borrow()?;
signal.fire(&mlua::MultiValue::new());
}
Ok(())
}

View File

@@ -117,7 +117,7 @@ impl mlua::FromLua for ScriptSignal{
}
impl mlua::UserData for ScriptConnection{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fn add_fields<F:UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Connected",|_,this|{
Ok(this.position().is_some())
});

View File

@@ -1,7 +1,7 @@
use super::instance::Instance;
use super::tween_info::TweenInfo;
#[allow(dead_code)]
#[expect(dead_code)]
#[derive(Clone)]
pub struct Tween{
instance:Instance,

View File

@@ -1,7 +1,7 @@
use super::number::Number;
use super::r#enum::{CoerceEnum,Enums};
#[allow(dead_code)]
#[expect(dead_code)]
#[derive(Clone)]
pub struct TweenInfo{
time:f64,

View File

@@ -46,8 +46,8 @@ impl Scheduler{
}
}
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut crate::scheduler::Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<crate::scheduler::Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
f(&mut *scheduler)
}

View File

@@ -1,11 +1,14 @@
[package]
name = "strafesnet_snf"
version = "0.3.0"
version = "0.3.1"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
binrw = "0.14.0"
binrw = "0.15.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -6,7 +6,7 @@ use strafesnet_common::physics::Time;
const VERSION:u32=0;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,Time>;
#[derive(Debug)]
pub enum Error{
@@ -85,6 +85,7 @@ pub struct Segment{
#[derive(Clone,Copy,Debug)]
pub struct SegmentInfo{
/// time of the first instruction in this segment.
#[expect(dead_code)]
time:Time,
instruction_count:u32,
/// How many total instructions in segments up to and including this segment
@@ -116,6 +117,7 @@ impl<R:BinReaderExt> StreamableBot<R>{
segment_map,
})
}
#[expect(dead_code)]
fn get_segment_info(&self,segment_id:SegmentId)->Result<SegmentInfo,Error>{
Ok(*self.segment_map.get(segment_id.get() as usize).ok_or(Error::InvalidSegmentId(segment_id))?)
}
@@ -272,7 +274,7 @@ pub fn write_bot<W:BinWriterExt>(mut writer:W,physics_version:u32,instructions:i
//probe header length
let mut bot_header_data=Vec::new();
binrw::BinWrite::write_le(&header,&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
header.write_le(&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
// the first block location is the map header
block_location.push(offset);

View File

@@ -53,8 +53,6 @@ pub(crate) enum FourCC{
Map,
#[brw(magic=b"SNFB")]
Bot,
#[brw(magic=b"SNFD")]
Demo,
}
#[binrw]
#[brw(little)]

View File

@@ -5,7 +5,6 @@ mod newtypes;
mod file;
pub mod map;
pub mod bot;
pub mod demo;
#[derive(Debug)]
pub enum Error{
@@ -13,7 +12,6 @@ pub enum Error{
Header(file::Error),
Map(map::Error),
Bot(bot::Error),
Demo(demo::Error),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -25,7 +23,6 @@ impl std::error::Error for Error{}
pub enum SNF<R:BinReaderExt>{
Map(map::StreamableMap<R>),
Bot(bot::StreamableBot<R>),
Demo(demo::StreamableDemo<R>),
}
pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
@@ -33,7 +30,6 @@ pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
Ok(match file.fourcc(){
file::FourCC::Map=>SNF::Map(map::StreamableMap::new(file).map_err(Error::Map)?),
file::FourCC::Bot=>SNF::Bot(bot::StreamableBot::new(file).map_err(Error::Bot)?),
file::FourCC::Demo=>SNF::Demo(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
})
}
pub fn read_map<R:BinReaderExt>(input:R)->Result<map::StreamableMap<R>,Error>{
@@ -50,13 +46,6 @@ pub fn read_bot<R:BinReaderExt>(input:R)->Result<bot::StreamableBot<R>,Error>{
_=>Err(Error::UnexpectedFourCC)
}
}
pub fn read_demo<R:BinReaderExt>(input:R)->Result<demo::StreamableDemo<R>,Error>{
let file=file::File::new(input).map_err(Error::Header)?;
match file.fourcc(){
file::FourCC::Demo=>Ok(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
_=>Err(Error::UnexpectedFourCC)
}
}
#[cfg(test)]
mod tests {

View File

@@ -97,8 +97,8 @@ enum ResourceType{
}
struct ResourceMap<T>{
meshes:HashMap<strafesnet_common::model::MeshId,T>,
textures:HashMap<strafesnet_common::model::TextureId,T>,
meshes:HashMap<model::MeshId,T>,
textures:HashMap<model::TextureId,T>,
}
impl<T> Default for ResourceMap<T>{
fn default()->Self{
@@ -185,7 +185,7 @@ pub struct StreamableMap<R:BinReaderExt>{
//this is every possible attribute... need some sort of streaming system
attributes:Vec<strafesnet_common::gameplay_attributes::CollisionAttributes>,
//this is every possible render configuration... shaders and such... need streaming
render_configs:Vec<strafesnet_common::model::RenderConfig>,
render_configs:Vec<model::RenderConfig>,
//this makes sense to keep in memory for streaming, a map of which blocks occupy what space
bvh:BvhNode<BlockId>,
//something something resources hashmaps
@@ -223,7 +223,7 @@ impl<R:BinReaderExt> StreamableMap<R>{
}
Ok(Self{
file,
modes:strafesnet_common::gameplay_modes::NormalizedModes::new(modes),
modes:gameplay_modes::NormalizedModes::new(modes),
attributes,
render_configs,
bvh:strafesnet_common::bvh::generate_bvh(bvh),
@@ -366,12 +366,12 @@ fn collect_spacial_blocks(
block_location.push(sequential_block_data.position());
}else{
match bvh_node.into_content(){
strafesnet_common::bvh::RecursiveContent::Branch(bvh_node_list)=>{
RecursiveContent::Branch(bvh_node_list)=>{
for bvh_node in bvh_node_list{
collect_spacial_blocks(block_location,block_headers,sequential_block_data,bvh_node)?;
}
},
strafesnet_common::bvh::RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
}
}
Ok(())
@@ -384,13 +384,13 @@ pub fn write_map<W:BinWriterExt>(mut writer:W,map:strafesnet_common::map::Comple
let boxen=map.models.into_iter().enumerate().map(|(model_id,model)|{
//grow your own aabb
let mesh=map.meshes.get(model.mesh.get() as usize).ok_or(Error::InvalidMeshId(model.mesh))?;
let mut aabb=strafesnet_common::aabb::Aabb::default();
let mut aabb=Aabb::default();
for &pos in &mesh.unique_pos{
aabb.grow(model.transform.transform_point3(pos).narrow_1().unwrap());
}
Ok(((model::ModelId::new(model_id as u32),model.into()),aabb))
}).collect::<Result<Vec<_>,_>>()?;
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|std::mem::size_of::<newtypes::model::Model>());
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|size_of::<newtypes::model::Model>());
//build blocks
//block location is initialized with two values
//the first value represents the location of the first byte after the file header

View File

@@ -104,6 +104,7 @@ impl From<strafesnet_common::gameplay_style::StyleModifiers> for StyleModifiers{
#[binrw::binrw]
#[brw(little,repr=u8)]
#[expect(dead_code)]
pub enum JumpCalculation{
Max,
BoostThenJump,
@@ -128,6 +129,7 @@ impl From<strafesnet_common::gameplay_style::JumpCalculation> for JumpCalculatio
}
}
#[expect(dead_code)]
pub enum JumpImpulse{
Time(Time),
Height(Planar64),

View File

@@ -20,7 +20,7 @@ impl TryInto<TimedPhysicsInstruction> for TimedInstruction{
}
}
impl TryFrom<TimedPhysicsInstruction> for TimedInstruction{
type Error=super::physics::InstructionConvert;
type Error=InstructionConvert;
fn try_from(value:TimedPhysicsInstruction)->Result<Self,Self::Error>{
Ok(Self{
time:value.time.get(),

View File

@@ -1,6 +1,6 @@
[package]
name = "map-tool"
version = "1.7.0"
version = "1.7.2"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,17 +12,16 @@ flate2 = "1.0.27"
futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
lazy-regex = "3.1.0"
rbx_asset = { version = "0.4.4", registry = "strafesnet" }
rbx_binary = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_dom_weak = { version = "3.1.0-sn4", registry = "strafesnet" }
rbx_asset = { version = "0.5.0", registry = "strafesnet" }
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_reflection_database = "1.0.0"
rbx_xml = { version = "1.1.0-sn4", registry = "strafesnet" }
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.0", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.6.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.0", path = "../lib/snf", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.1", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.7.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.1", path = "../lib/snf", registry = "strafesnet" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
vbsp = "0.9.1"
@@ -36,3 +35,6 @@ vtf = "0.3.0"
#lto = true
#strip = true
#codegen-units = 1
[lints]
workspace = true

View File

@@ -32,8 +32,12 @@ pub struct RobloxToSNFSubcommand {
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
// #[arg(long)]
// cookie_file:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
}
impl Commands{
@@ -42,13 +46,27 @@ impl Commands{
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
Commands::DownloadAssets(subcommand)=>download_assets(
subcommand.roblox_files,
rbx_asset::cookie::Cookie::new("".to_string()),
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
).await,
}
}
}
#[allow(unused)]
async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<rbx_asset::cookie::Cookie>{
let cookie=match (literal,environment,file){
(Some(cookie_literal),None,None)=>cookie_literal,
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
};
Ok(rbx_asset::cookie::Cookie::new(cookie))
}
#[expect(dead_code)]
#[derive(Debug)]
enum LoadDomError{
IO(std::io::Error),
@@ -174,7 +192,7 @@ impl UniqueAssets{
}
}
#[allow(unused)]
#[expect(dead_code)]
#[derive(Debug)]
enum UniqueAssetError{
IO(std::io::Error),
@@ -249,8 +267,8 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
tokio::fs::write(path,&data).await?;
break Ok(DownloadResult::Data(data));
},
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
if scwuab.status_code.as_u16()==429{
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::Details{status_code,url_and_body}))=>{
if status_code.as_u16()==429{
if retry==12{
println!("Giving up asset download {asset_id}");
stats.timed_out_downloads+=1;
@@ -262,7 +280,7 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
retry+=1;
}else{
stats.failed_downloads+=1;
println!("weird scuwab error: {scwuab:?}");
println!("weird status_code error: status_code={status_code} url={} body={}",url_and_body.url,url_and_body.body);
break Ok(DownloadResult::Failed);
}
},
@@ -403,7 +421,7 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
}
#[derive(Debug)]
#[allow(dead_code)]
#[expect(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -416,17 +434,23 @@ impl std::fmt::Display for ConvertError{
}
}
impl std::error::Error for ConvertError{}
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
struct Errors{
script_errors:Vec<strafesnet_rbx_loader::RunnerError>,
convert_errors:strafesnet_rbx_loader::RecoverableErrors,
}
fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>{
let entire_file=std::fs::read(path).map_err(ConvertError::IO)?;
let model=strafesnet_rbx_loader::read(
std::io::Cursor::new(entire_file)
entire_file.as_slice()
).map_err(ConvertError::RobloxRead)?;
let mut place=strafesnet_rbx_loader::Place::from(model);
place.run_scripts();
let script_errors=place.run_scripts().unwrap_or_else(|e|vec![e]);
let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let (map,convert_errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
@@ -435,24 +459,37 @@ async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(())
Ok(Errors{
script_errors,
convert_errors,
})
}
async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
async fn roblox_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
// This is wrong! Calling roblox_to_snf multiple times keeps adding permits
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),output_folder).await;
tokio::task::spawn_blocking(move||{
let result=convert_to_snf(path.as_path(),output_folder);
drop(permit);
match result{
Ok(())=>(),
Ok(errors)=>{
for error in errors.script_errors{
println!("Script error: {error}");
}
let error_count=errors.convert_errors.count();
if error_count!=0{
println!("Error count: {error_count}");
println!("Errors: {}",errors.convert_errors);
}
},
Err(e)=>println!("Convert error: {e:?}"),
}
});
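
On the "This is wrong!" note above: one way to keep permits from accumulating across calls would be to create the semaphore per call instead of adding permits to a static. A hedged sketch, not the project's actual code, assuming tokio with the sync, rt-multi-thread and macros features:

use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::Semaphore;

async fn convert_all(paths:Vec<PathBuf>,thread_limit:usize){
    // fresh semaphore per call, so repeated calls cannot stack permits
    let sem=Arc::new(Semaphore::new(thread_limit));
    let mut handles=Vec::new();
    for path in paths{
        let permit=sem.clone().acquire_owned().await.expect("semaphore closed");
        handles.push(tokio::task::spawn_blocking(move||{
            // ... convert `path` to SNF here (placeholder) ...
            println!("converting {}",path.display());
            drop(permit);// free the slot when this conversion finishes
        }));
    }
    for handle in handles{let _=handle.await;}
}

#[tokio::main]
async fn main(){
    convert_all(vec![PathBuf::from("example.rbxl")],2).await;
}
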

View File

@@ -452,7 +452,7 @@ fn bsp_contents(path:PathBuf)->AResult<()>{
}
#[derive(Debug)]
#[allow(dead_code)]
#[expect(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -484,7 +484,7 @@ async fn convert_to_snf(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],output
Ok(())
}
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
async fn source_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();

View File

@@ -28,5 +28,12 @@ strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet", o
strafesnet_session = { path = "../engine/session", registry = "strafesnet" }
strafesnet_settings = { path = "../engine/settings", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet", optional = true }
wgpu = "25.0.0"
wgpu = "27.0.0"
winit = "0.30.7"
[profile.dev]
strip = false
opt-level = 3
[lints]
workspace = true

View File

@@ -3,7 +3,7 @@ use std::io::Read;
#[cfg(any(feature="roblox",feature="source"))]
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
#[allow(dead_code)]
#[expect(dead_code)]
#[derive(Debug)]
pub enum ReadError{
#[cfg(feature="roblox")]
@@ -63,7 +63,7 @@ pub fn read<R:Read+std::io::Seek>(input:R)->Result<ReadFormat,ReadError>{
}
}
#[allow(dead_code)]
#[expect(dead_code)]
#[derive(Debug)]
pub enum LoadError{
ReadError(ReadError),
@@ -98,10 +98,15 @@ pub fn load<P:AsRef<std::path::Path>>(path:P)->Result<LoadFormat,LoadError>{
#[cfg(feature="roblox")]
ReadFormat::Roblox(model)=>{
let mut place=strafesnet_rbx_loader::Place::from(model);
place.run_scripts();
Ok(LoadFormat::Map(
place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?
))
let script_errors=place.run_scripts().unwrap();
for error in script_errors{
println!("Script error: {error}");
}
let (map,errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?;
if errors.count()!=0{
print!("Errors encountered while loading the map:\n{}",errors);
}
Ok(LoadFormat::Map(map))
},
#[cfg(feature="source")]
ReadFormat::Source(bsp)=>Ok(LoadFormat::Map(

View File

@@ -21,7 +21,7 @@ WorkerDescription{
pub fn new(
mut graphics:graphics::GraphicsState,
mut config:wgpu::SurfaceConfiguration,
surface:wgpu::Surface,
surface:wgpu::Surface<'_>,
device:wgpu::Device,
queue:wgpu::Queue,
)->crate::compat_worker::INWorker<'_,Instruction>{

View File

@@ -119,12 +119,13 @@ impl<'a> SetupContextPartial3<'a>{
let (device, queue)=pollster::block_on(self.adapter
.request_device(
&wgpu::DeviceDescriptor {
label: None,
required_features: (optional_features & self.adapter.features()) | required_features,
required_limits: needed_limits,
&wgpu::DeviceDescriptor{
label:None,
required_features:(optional_features&self.adapter.features())|required_features,
required_limits:needed_limits,
memory_hints:wgpu::MemoryHints::Performance,
trace: wgpu::Trace::Off,
trace:wgpu::Trace::Off,
experimental_features:wgpu::ExperimentalFeatures::disabled(),
},
))
.expect("Unable to find a suitable GPU adapter!");

View File

@@ -188,7 +188,7 @@ impl WindowContext<'_>{
}
}
fn device_event(&mut self,time:SessionTime,event: winit::event::DeviceEvent){
fn device_event(&mut self,time:SessionTime,event:winit::event::DeviceEvent){
match event{
winit::event::DeviceEvent::MouseMotion{
delta,

tools/clarion Executable file
View File

@@ -0,0 +1 @@
mangohud ../target/release/strafe-client bhop_maps/5692098704.snfm "$@"